由于所使用的 Linux 系统是 64 位的,而 Hadoop 官方发布包自带的 native 库是 32 位的,所以需要下载 Hadoop 的源码包在本地重新编译
需要安装一些软件:
# 安裝基本套件
[root@localhost ~]# su root
[root@localhost ~]# yum -y install gcc gcc-c++ svn cmake git zlib zlib-devel openssl openssl-devel rsync
或
[root@localhost ~]# yum -y groupinstall 'Development Tools'
[root@localhost ~]# yum -y install cmake zlib-devel openssl openssl-devel rsync
安装 jdk 和 maven
# 安裝 Apache Maven 3.1.1
cd /usr/local/src
wget http://ftp.tc.edu.tw/pub/Apache/maven/maven-3/3.1.1/binaries/apache-maven-3.1.1-bin.tar.gz
tar zxvf apache-maven-3.1.1-bin.tar.gz -C /usr/local
ln -s /usr/local/apache-maven-3.1.1/bin/mvn /usr/bin/mvn
# 安裝 FindBugs
cd /usr/local/src
wget http://prdownloads.sourceforge.net/findbugs/findbugs-2.0.2.tar.gz?download
tar zxvf findbugs-2.0.2.tar.gz -C /usr/local/
ln -s /usr/local/findbugs-2.0.2/bin/findbugs /usr/bin/findbugs
# 安裝 Protoc 2.5.0
cd /usr/local/src
# 注:protobuf.googlecode.com 已停止服务,同版本 tarball 改从 GitHub 下载:
wget https://github.com/protocolbuffers/protobuf/releases/download/v2.5.0/protobuf-2.5.0.tar.gz
tar zxvf protobuf-2.5.0.tar.gz -C /usr/local/src
cd /usr/local/src/protobuf-2.5.0
./configure
make
make install
ln -s /usr/local/bin/protoc /usr/bin/protoc
修改 pom.xml:
hadoop-2.2.0-src/hadoop-common-project/hadoop-auth
<dependency>
63 <groupId>org.mortbay.jetty</groupId>
64 <artifactId>jetty-util</artifactId>
65 <scope>test</scope>
66 </dependency>
hadoop-2.2.0-src/hadoop-common-project
<dependencies>
41 <dependency>
42 <groupId>org.mortbay.jetty</groupId>
43 <artifactId>jetty-util</artifactId>
44 <scope>test</scope>
45 </dependency>
46 <dependency>
47 <groupId>org.mortbay.jetty</groupId>
48 <artifactId>jetty</artifactId>
49 <scope>test</scope>
50 </dependency>
51
52 </dependencies>
在源码根目录(hadoop-2.2.0-src)下执行编译:mvn package -Pdist,native -DskipTests -Dtar
编译完成后,产物位于 /root/java/hadoop-2.2.0-src/hadoop-dist/target
其中的 hadoop-2.2.0 目录即为适用于 64 位系统的发布包,直接 copy 出来就可以使用
安装:
修改hostname:
采用 systemd 的 Fedora(18 及之后版本;原文写 "Fedora 9" 疑为笔误)中 hostname 不再读取 /etc/sysconfig/network 文件,它读取的是 /etc/hostname 文件
辅助操作:/etc/rc.d/init.d/network restart 重新启动网络服务
systemctl stop iptables.service 关闭防火墙
修改 etc/profile:
export TERM=xterm-color
78 export GOROOT=/root/go
79 export PATH=$PATH:$GOROOT/bin
80 JAVA_HOME=/root/java/jdk
81 CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
82 PATH=$JAVA_HOME/bin:$PATH:$GOROOT/bin
83 MAVEN_HOME=/root/java/maven
84 PATH=${PATH}:${MAVEN_HOME}/bin
85
86 export HADOOP_PREFIX=/root/java/hadoop-2.2.0
87 export PATH=$PATH:$HADOOP_PREFIX/bin
88 export PATH=$PATH:$HADOOP_PREFIX/sbin
89 export HADOOP_MAPRED_HOME=${HADOOP_PREFIX}
90 export HADOOP_COMMON_HOME=${HADOOP_PREFIX}
91 export HADOOP_HDFS_HOME=${HADOOP_PREFIX}
92 export YARN_HOME=${HADOOP_PREFIX}
93 export HADOOP_CONF_DIR=${HADOOP_PREFIX}/etc/hadoop
94 export YARN_CONF_DIR=${HADOOP_PREFIX}/etc/hadoop
95
96 export JAVA_HOME CLASSPATH PATH MAVEN_HOME
修改:/etc/hosts:
0.0.0.0 localhost localhost.localdomain localhost4 localhost4.localdomain4
因为假如使用虚拟机的话,只绑定在 127.0.0.1 上的服务无法被外部访问
0.0.0.0 表示监听本机所有网卡地址;注意这只是让服务绑定到所有接口的变通做法,生产环境建议将主机名映射到实际 IP
hdfs namenode -format(Hadoop 2.x 中 hadoop namenode -format 已标记为过时,建议改用 hdfs 命令)
启动 start-dfs.sh start-yarn.sh
[root@localhost target]# jps
2883 ResourceManager
2697 SecondaryNameNode
2493 DataNode
2980 NodeManager
2348 NameNode
3587 Jps