1. Download the JDK and Hadoop
- Download jdk-7u7-linux-i586.tar.gz and hadoop-2.6.0.tar.gz from:
http://www.apache.org/dyn/closer.cgi/hadoop/common/
http://www.oracle.com/technetwork/cn/java/javase/downloads/jdk7-downloads-1880260.html
sudo tar zxvf jdk-7u7-linux-i586.tar.gz -C /home
sudo tar zxvf hadoop-2.6.0.tar.gz -C /home/hadoop001
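As a quick sanity check (the directory names are the defaults these tarballs unpack to, matching the paths used below):
ls /home/jdk1.7.0_07             # should list bin/, lib/, ...
ls /home/hadoop001/hadoop-2.6.0  # should list bin/, etc/, sbin/, ...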
2. Configure environment variables
sudo gedit /etc/profile
- Add the JDK and Hadoop paths to the end of the profile file:
export JAVA_HOME=/home/jdk1.7.0_07
export HADOOP_HOME=/home/hadoop001/hadoop-2.6.0
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
export PATH=$PATH:$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
source /etc/profile
java -version
hadoop version
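Both commands should print a version banner rather than "command not found"; roughly (exact build strings will vary):
java -version    # expect a line like: java version "1.7.0_07"
hadoop version   # expect the first line: Hadoop 2.6.0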
3. Configure Hadoop
- /home/hadoop001/hadoop-2.6.0/etc/hadoop/hadoop-env.sh
#export JAVA_HOME=${JAVA_HOME}
export JAVA_HOME=/home/jdk1.7.0_07
- /home/hadoop001/hadoop-2.6.0/etc/hadoop/yarn-env.sh
export JAVA_HOME=/home/jdk1.7.0_07
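Instead of editing each file by hand, the export can simply be appended; the last assignment wins when the script is sourced (a convenience sketch, assuming the paths above):
cd /home/hadoop001/hadoop-2.6.0/etc/hadoop
echo 'export JAVA_HOME=/home/jdk1.7.0_07' | sudo tee -a hadoop-env.sh yarn-env.sh > /dev/null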
- /home/hadoop001/hadoop-2.6.0/etc/hadoop/core-site.xml
<configuration>
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/home/hadoop001/hadoop2data/tmp</value>
  </property>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://192.168.1.242:9000</value>
  </property>
  <property>
    <name>io.file.buffer.size</name>
    <value>4096</value>
  </property>
</configuration>
- /home/hadoop001/hadoop-2.6.0/etc/hadoop/mapred-site.xml
<configuration>
  <property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
    <final>true</final>
  </property>
  <property>
    <name>mapreduce.jobhistory.address</name>
    <value>192.168.1.242:10020</value>
  </property>
  <property>
    <name>mapreduce.jobhistory.webapp.address</name>
    <value>192.168.1.242:19888</value>
  </property>
</configuration>
- /home/hadoop001/hadoop-2.6.0/etc/hadoop/hdfs-site.xml
<configuration>
  <property>
    <name>dfs.namenode.name.dir</name>
    <value>/home/hadoop001/hadoop2data/name</value>
  </property>
  <property>
    <name>dfs.datanode.data.dir</name>
    <value>/home/hadoop001/hadoop2data/data</value>
  </property>
  <property>
    <name>dfs.replication</name>
    <value>1</value>
    <description>Careful here: the value must not exceed the number of machines in the cluster; for a single node, 1 is fine.</description>
  </property>
  <property>
    <name>dfs.nameservices</name>
    <value>hadoop-cluster1</value>
  </property>
  <property>
    <name>dfs.namenode.secondary.http-address</name>
    <value>192.168.1.242:50090</value>
  </property>
  <property>
    <name>dfs.webhdfs.enabled</name>
    <value>true</value>
  </property>
</configuration>
- /home/hadoop001/hadoop-2.6.0/etc/hadoop/yarn-site.xml
<configuration>
  <property>
    <name>yarn.resourcemanager.hostname</name>
    <value>192.168.1.242</value>
  </property>
  <property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
  </property>
  <property>
    <name>yarn.resourcemanager.address</name>
    <value>192.168.1.242:8032</value>
  </property>
  <property>
    <name>yarn.resourcemanager.scheduler.address</name>
    <value>192.168.1.242:8030</value>
  </property>
  <property>
    <name>yarn.resourcemanager.resource-tracker.address</name>
    <value>192.168.1.242:8031</value>
  </property>
  <property>
    <name>yarn.resourcemanager.admin.address</name>
    <value>192.168.1.242:8033</value>
  </property>
  <property>
    <name>yarn.resourcemanager.webapp.address</name>
    <value>192.168.1.242:8088</value>
  </property>
  <property>
    <name>yarn.log.dir</name>
    <value>/home/hadoop001/hadoop2data/yarn_log</value>
  </property>
</configuration>
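The four local directories referenced in the XML above are worth creating up front so the daemons do not trip over missing paths or permissions (ownership here assumes the daemons run as the hadoop001 user, which the home directory suggests):
sudo mkdir -p /home/hadoop001/hadoop2data/{tmp,name,data,yarn_log}
sudo chown -R hadoop001 /home/hadoop001/hadoop2data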
- Format HDFS (first run only; hadoop namenode -format is the deprecated 1.x spelling of the same command in 2.x)
hdfs namenode -format
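Near the end of the format output there should be a success line for the name directory from hdfs-site.xml; from memory it looks roughly like this (treat the exact wording as approximate):
... common.Storage: Storage directory /home/hadoop001/hadoop2data/name has been successfully formatted.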
- Start/stop all daemons (start-all.sh and stop-all.sh still work in 2.x but are deprecated; start-dfs.sh plus start-yarn.sh is the preferred split)
start-all.sh
stop-all.sh
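Once start-all.sh returns, jps should show one JVM per daemon; on a single-node setup like this the expected set is (process IDs will differ):
jps
# 4231 NameNode
# 4389 DataNode
# 4565 SecondaryNameNode
# 4712 ResourceManager
# 4820 NodeManager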
4. Problems encountered
- http://192.168.1.242:8088 was reachable but http://192.168.1.242:50070 was not
In my case this was because the filesystem had never been formatted; after running hdfs namenode -format the page became accessible.
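If the page is still unreachable after formatting, two extra checks (not from the original notes) can narrow it down:
hdfs dfsadmin -report              # should report one live datanode
sudo netstat -tlnp | grep 50070    # should show a java process listening on the NameNode web port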
- Fixing /lib/ld-linux.so.2: bad ELF interpreter: No such file or directory
This happens when a 32-bit program (here the i586 JDK) runs on a 64-bit system that lacks the 32-bit C library.
Fix: yum install glibc.i686
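To confirm this is the cause, file reports the binary's architecture (the path assumes the JDK location from step 1):
file /home/jdk1.7.0_07/bin/java
# "ELF 32-bit LSB executable" on a 64-bit host means the 32-bit loader/glibc is missing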
- yum fails with Error: Cannot find a valid baseurl for repo: base
sudo gedit /etc/resolv.conf
Append at the end of the file: nameserver 8.8.8.8
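Afterwards, a quick way to verify DNS works and force yum to re-fetch repo metadata (any public hostname will do for the ping):
ping -c 1 mirror.centos.org
sudo yum clean all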
- Adding your user to /etc/sudoers
chmod 777 /etc/sudoers
Below the root entry, add a line granting your user the same rights:
root ALL=(ALL) ALL
your_username ALL=(ALL) ALL
chmod 440 /etc/sudoers
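A safer route than chmod'ing /etc/sudoers by hand (a suggestion beyond the original notes): visudo locks the file and refuses to save syntactically invalid content, so a typo cannot lock you out of sudo.
visudo    # run as root; validates the file before installing it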