Pseudo-distributed setup of Hadoop-2.2.0 and hbase-0.96.0-hadoop2
(1) Hostname
sudo gedit /etc/hostname
master1
sudo gedit /etc/hosts
127.0.0.1 master1
(2) SSH (passwordless login)
sudo apt-get install ssh
mkdir /home/hadoop/.ssh
ssh-keygen -t rsa
cat /home/hadoop/.ssh/id_rsa.pub >> /home/hadoop/.ssh/authorized_keys
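If the next logins still prompt for a password, the usual fix (not part of the original steps, but commonly needed) is tightening the key-file permissions:
chmod 700 /home/hadoop/.ssh
chmod 600 /home/hadoop/.ssh/authorized_keys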
ssh localhost
ssh master1
(3) Environment variables
export ANT_HOME=/opt/apache-ant-1.9.3
export MAVEN_HOME=/opt/apache-maven-3.0.5
export FINDBUGS_HOME=/opt/findbugs-2.0.2
export PATH=${ANT_HOME}/bin:${MAVEN_HOME}/bin:${FINDBUGS_HOME}/bin:$PATH
#jdk
export JAVA_HOME=/opt/jdk1.7.0_45
export JRE_HOME=/opt/jdk1.7.0_45/jre
export CLASSPATH=.:$JAVA_HOME/lib:$JRE_HOME/lib:$CLASSPATH
export PATH=$JAVA_HOME/bin:$JRE_HOME/bin:$PATH
#hadoop
export HADOOP_HOME=/opt/hadoop-2.2.0
export HADOOP_CONF_DIR=${HADOOP_HOME}/etc/hadoop
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
#hbase
export HBASE_HOME=/opt/hbase-0.96.0-hadoop2
export PATH=$PATH:$HBASE_HOME/bin
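Assuming the exports above go into /etc/profile (or ~/.bashrc), reload them in the current shell before continuing:
source /etc/profile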
(4) Install locations (unpack to)
/opt/jdk1.7.0_45
/opt/hadoop-2.2.0
/opt/hbase-0.96.0-hadoop2
(5) Create local directories (matching the configuration files below)
mkdir -p /data/tmp_hadoop /data/hdfs/nn /data/hdfs/dn /data/tmp_hbase /data/yarn/local /data/yarn/logs /data/log/hadoop-hdfs /data/log/hadoop-yarn /data/log/hadoop-mapred /data/hbase/logs
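Since the daemons are assumed to run as the hadoop user (the chown steps later suggest this), the /data tree should be writable by that user, for example:
sudo chown -R hadoop:hadoop /data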
cd /opt/hadoop-2.2.0/etc/hadoop
(1) core-site.xml
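A minimal sketch of core-site.xml for this layout; the port 9000 is an assumption, and hadoop.tmp.dir points at the /data/tmp_hadoop directory created above:
<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://master1:9000</value>
  </property>
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/data/tmp_hadoop</value>
  </property>
</configuration>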
(2) hadoop-env.sh
export JAVA_HOME=/opt/jdk1.7.0_45
export HADOOP_LOG_DIR=/data/log/hadoop-hdfs
export YARN_LOG_DIR=/data/log/hadoop-yarn
export HADOOP_MAPRED_LOG_DIR=/data/log/hadoop-mapred
(3) hdfs-site.xml
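A minimal sketch of hdfs-site.xml, using the nn/dn directories created above; replication 1 suits a single-node, pseudo-distributed setup:
<configuration>
  <property>
    <name>dfs.replication</name>
    <value>1</value>
  </property>
  <property>
    <name>dfs.namenode.name.dir</name>
    <value>file:///data/hdfs/nn</value>
  </property>
  <property>
    <name>dfs.datanode.data.dir</name>
    <value>file:///data/hdfs/dn</value>
  </property>
</configuration>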
(4) mapred-site.xml
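A minimal sketch of mapred-site.xml (copy mapred-site.xml.template if the file does not exist); the jobhistory addresses are assumptions chosen to match the 19888 web UI checked later:
<configuration>
  <property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
  </property>
  <property>
    <name>mapreduce.jobhistory.address</name>
    <value>master1:10020</value>
  </property>
  <property>
    <name>mapreduce.jobhistory.webapp.address</name>
    <value>master1:19888</value>
  </property>
</configuration>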
(5) masters
localhost
(6) slaves
localhost
(7) yarn-env.sh
export JAVA_HOME=/opt/jdk1.7.0_45
(8) yarn-site.xml
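A minimal sketch of yarn-site.xml wired to the /data/yarn directories created above; mapreduce_shuffle is the aux-service name used by Hadoop 2.2.0:
<configuration>
  <property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
  </property>
  <property>
    <name>yarn.nodemanager.local-dirs</name>
    <value>/data/yarn/local</value>
  </property>
  <property>
    <name>yarn.nodemanager.log-dirs</name>
    <value>/data/yarn/logs</value>
  </property>
</configuration>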
(9) Start Hadoop
sudo chown -R hadoop:hadoop hadoop-2.2.0/
hdfs namenode -format
start-dfs.sh
Run jps; you should see:
NameNode
DataNode
SecondaryNameNode
localhost:50070
start-yarn.sh
Run jps again; these should be added:
ResourceManager
NodeManager
localhost:8088
mr-jobhistory-daemon.sh start historyserver
Run jps again; this should be added:
JobHistoryServer
localhost:19888
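As a quick sanity check once HDFS, YARN, and the history server are up, the bundled examples jar (path assumed from a default /opt/hadoop-2.2.0 unpack) can run a small pi job:
hadoop jar /opt/hadoop-2.2.0/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.2.0.jar pi 2 5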
(1) hbase-env.sh
export JAVA_HOME=/opt/jdk1.7.0_45
export PATH=$JAVA_HOME/bin:$JAVA_HOME/jre/bin:$PATH
export HBASE_MANAGES_ZK=true
export HBASE_CLASSPATH=/opt/hadoop-2.2.0/etc/hadoop
export HBASE_LOG_DIR=/data/hbase/logs
(2) hbase-site.xml
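A minimal sketch of hbase-site.xml; hbase.rootdir must use the same host/port as fs.defaultFS in core-site.xml (9000 assumed above), and hbase.cluster.distributed stays true even in pseudo-distributed mode because HBase runs on HDFS:
<configuration>
  <property>
    <name>hbase.rootdir</name>
    <value>hdfs://master1:9000/hbase</value>
  </property>
  <property>
    <name>hbase.cluster.distributed</name>
    <value>true</value>
  </property>
  <property>
    <name>hbase.tmp.dir</name>
    <value>/data/tmp_hbase</value>
  </property>
  <property>
    <name>hbase.zookeeper.quorum</name>
    <value>localhost</value>
  </property>
</configuration>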
(3)regionservers
localhost
(4) Start HBase
sudo chown -R hadoop:hadoop hbase-0.96.0-hadoop2/
start-hbase.sh
Run jps; you should see:
HQuorumPeer
HRegionServer
HMaster
Visit: localhost:60010
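To verify HBase beyond jps and the web UI, a quick test from the HBase shell:
hbase shell
create 't1', 'cf'
put 't1', 'r1', 'cf:a', 'value1'
scan 't1'
exit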