Hadoop Pseudo-Distributed Deployment Process

1. Configure PATH

#vi /etc/profile

export JAVA_HOME=/usr/java/jdk1.7.0_60   # must match the JAVA_HOME set in hadoop-env.sh below
export PATH="$JAVA_HOME/bin:$PATH"
export HADOOP_PREFIX=/home/hadoop/hadoop-2.4.0
export CLASSPATH=".:$JAVA_HOME/lib:$CLASSPATH"
export PATH="$JAVA_HOME/:$HADOOP_PREFIX/bin:$PATH"
export HADOOP_PREFIX PATH CLASSPATH
export LD_LIBRARY_PATH=$HADOOP_PREFIX/lib/native/

#source /etc/profile

#echo $PATH
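To confirm the new environment is in effect, a quick sanity check (assumes the JDK and Hadoop paths configured above):

#java -version

#hadoop version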

2. Configure Hadoop (the files below live in $HADOOP_PREFIX/etc/hadoop)

1) hadoop-env.sh:

export JAVA_HOME=/usr/java/jdk1.7.0_60
export HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_PREFIX}/lib/native
export HADOOP_OPTS="-Djava.library.path=$HADOOP_PREFIX/lib"
2) core-site.xml :

        <configuration>
                <property>
                        <name>fs.default.name</name>
                        <value>hdfs://localhost:9000</value>
                </property>
        </configuration>

(fs.default.name is the deprecated alias of fs.defaultFS and still works in 2.4.0. dfs.namenode.name.dir and dfs.datanode.data.dir are HDFS properties; they are set in hdfs-site.xml below, so they are omitted here.)
3) hdfs-site.xml :

        <configuration>
                <property>
                        <name>dfs.replication</name>
                        <value>1</value>
                </property>
                <property>
                        <name>dfs.permissions</name>
                        <value>false</value>
                </property>
                <property>
                        <name>dfs.namenode.name.dir</name>
                        <value>file:/home/hadoop/hadoop-2.4.0/dfs/name</value>
                </property>
                <property>
                        <name>dfs.datanode.data.dir</name>
                        <value>file:/home/hadoop/hadoop-2.4.0/dfs/data</value>
                </property>
        </configuration>
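The NameNode format and DataNode startup will normally create these directories, but creating them up front avoids ownership and permission surprises; the paths match the values above:

#mkdir -p /home/hadoop/hadoop-2.4.0/dfs/name /home/hadoop/hadoop-2.4.0/dfs/data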
4) mapred-site.xml (created from the template, see below):

        <configuration>
                <property>
                        <name>mapreduce.framework.name</name>
                        <value>yarn</value>
                </property>
        </configuration>

(mapreduce.jobtracker.address is an MRv1 setting and is ignored when running on YARN, so it is dropped; mapreduce.framework.name belongs here, not in yarn-site.xml.)
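mapred-site.xml does not ship by default; create it from the bundled template before editing:

#cd $HADOOP_PREFIX/etc/hadoop

#cp mapred-site.xml.template mapred-site.xml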
5) yarn-site.xml:

        <configuration>
                <property>
                        <name>yarn.nodemanager.aux-services</name>
                        <value>mapreduce_shuffle</value>
                </property>
        </configuration>
3. Passwordless SSH login (as root)

#ssh-keygen -t rsa -P ''

#cat /root/.ssh/id_rsa.pub >> /root/.ssh/authorized_keys
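Lock down the key file permissions and confirm that a password-free login works (assumes sshd is running locally):

#chmod 600 /root/.ssh/authorized_keys

#ssh localhost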

4. Format HDFS:

#./bin/hdfs namenode -format

(bin/hadoop namenode -format still works but is deprecated in 2.x in favor of bin/hdfs.)

5. Start Hadoop:

#./sbin/start-all.sh
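start-all.sh is deprecated in Hadoop 2.x; the per-daemon scripts do the same thing and are the recommended way:

#./sbin/start-dfs.sh

#./sbin/start-yarn.sh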

6. Stop Hadoop:

#./sbin/stop-all.sh

7. Web UI access (from a browser):

HDFS NameNode overview ----> http://localhost:50070

YARN applications ---------> http://localhost:8088
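A headless way to check that both UIs are up (assumes curl is installed; 200 means the daemon is serving):

#curl -s -o /dev/null -w "%{http_code}\n" http://localhost:50070

#curl -s -o /dev/null -w "%{http_code}\n" http://localhost:8088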

Note:

After startup, the basic set of processes, as reported by jps, is:

SecondaryNameNode
DataNode
NodeManager
Jps
ResourceManager
NameNode

