a.下载hadoop:hadoop下载地址
b.必须先安装好Java,参考:java安装
c.必须先安装好SSH服务,SSH服务安装:
$ sudo apt-get install ssh
$ sudo apt-get install rsync
tar -zxvf hadoop-2.7.3.tar.gz
a. vi /opt/hadoop-2.7.3/etc/hadoop/core-site.xml
<configuration>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://localhost:9000</value>
    </property>
</configuration>
b. vi /opt/hadoop-2.7.3/etc/hadoop/hdfs-site.xml
<configuration>
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
</configuration>
c. vi /opt/hadoop-2.7.3/etc/hadoop/hadoop-env.sh
修改JAVA_HOME, HADOOP_CONF_DIR
# The java implementation to use.
export JAVA_HOME=/opt/jdk1.8.0_101
# The jsvc implementation to use. Jsvc is required to run secure datanodes
# that bind to privileged ports to provide authentication of data transfer
# protocol. Jsvc is not required if SASL is configured for authentication of
# data transfer protocol using non-privileged ports.
#export JSVC_HOME=${JSVC_HOME}
export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/opt/hadoop-2.7.3/etc/hadoop"}
d. vi /opt/hadoop-2.7.3/etc/hadoop/mapred-site.xml
<configuration>
    <property>
        <name>mapred.job.tracker</name>
        <value>hdfs://localhost:9001</value>
    </property>
</configuration>
a. 使用ssh-keygen生成密钥对
b.复制到信任用户中
cat $HOME/.ssh/id_rsa.pub >> $HOME/.ssh/authorized_keys
vi /etc/profile
export HADOOP_HOME=/opt/hadoop-2.7.3
source /etc/profile
$HADOOP_HOME/bin/hdfs namenode -format
$HADOOP_HOME/sbin/start-dfs.sh
http://hadoop.apache.org/docs/r2.7.3/hadoop-project-dist/hadoop-common/SingleCluster.html