操作系统:ubuntu-14.04.1-x64(下载、安装过程略)
jdk:jdk1.8.0_60(下载、安装过程略)
hadoop:hadoop-2.7.1,下载地址:http://www.apache.org/dyn/closer.cgi/hadoop/common/hadoop-2.7.1/hadoop-2.7.1.tar.gz
# Install and start the SSH server, then set up passwordless login to
# localhost (required so Hadoop's start scripts can ssh to this host).
apt-get install -y ssh openssh-server
service ssh start   # original had bare "ssh start", which is not a valid command

# Generate a passphrase-less RSA key pair and authorize it for this host.
# Fixes from original: "ssh -keygen" -> "ssh-keygen" (one word),
# "id_rsa.put" -> "id_rsa.pub" (the public-key file).
ssh-keygen -t rsa -P "" -f ~/.ssh/id_rsa
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys

# Verify passwordless login works (first connection also records the host key).
ssh localhost
tar -zxvf /home/adrianlynn/Downloads/software/hadoop-2.7.1.tar.gz -C /opt mv /opt/hadoop-2.7.1 /opt/hadoop
# Environment for Hadoop and the JDK (add to ~/.bashrc to persist).
# The original exported JAVA_HOME twice and appended the hadoop bin
# directory to PATH twice; one export of each is sufficient.
export HADOOP_HOME=/opt/hadoop
export JAVA_HOME=/opt/jdk1.8.0_60
export PATH="${HADOOP_HOME}/bin:${PATH}"

# Load Hadoop's own environment scripts (they pick up JAVA_HOME etc.).
# Guarded so a missing file produces a clear skip instead of a source error.
for env_script in "$HADOOP_HOME/etc/hadoop/hadoop-env.sh" \
                  "$HADOOP_HOME/etc/hadoop/yarn-env.sh"; do
  [ -f "$env_script" ] && . "$env_script"
done
root@Demon:~# hadoop version Hadoop 2.7.1 Subversion https://git-wip-us.apache.org/repos/asf/hadoop.git -r 15ecc87ccf4a0228f35af08fc56de536e6ce657a Compiled by jenkins on 2015-06-29T06:04Z Compiled with protoc 2.5.0 From source with checksum fc0a1a23fc1868e4d5ee7fa2b28a58a This command was run using /opt/hadoop/share/hadoop/common/hadoop-common-2.7.1.jar
mkdir input cp -R $HADOOP_HOME/etc/hadoop/* $HADOOP_HOME/input cd $HADOOP_HOME hadoop jar hadoop-examples-1.2.0.jar wordcount input output
mkdir -p $HADOOP_HOME/hdfs/name mkdir -p $HADOOP_HOME/hdfs/data
<configuration> <property> <name>fs.defaultFS</name> <value>hdfs://Demon:9000</value> </property> <property> <name>hadoop.tmp.dir</name> <value>file:/opt/hadoop/tmp</value> </property> </configuration>
<configuration> <property> <name>dfs.datanode.ipc.address</name> <value>Demon:50020</value> </property> <property> <name>dfs.datanode.http.address</name> <value>Demon:50075</value> </property> <property> <name>dfs.namenode.name.dir</name> <value>file:/opt/hadoop/hdfs/name</value> </property> <property> <name>dfs.datanode.data.dir</name> <value>file:/opt/hadoop/hdfs/data</value> </property> <property> <name>dfs.namenode.secondary.http-address</name> <value>Demon:50090</value> </property>
<configuration> <property> <name>mapreduce.framework.name</name> <value>yarn</value> </property> </configuration>
<configuration> <property> <name>yarn.nodemanager.aux-services</name> <value>mapreduce_shuffle</value> </property> </configuration>
15/09/13 11:19:55 INFO common.Storage: Storage directory /opt/hadoop/hdfs/name has been successfully formatted. 15/09/13 11:19:55 INFO namenode.NNStorageRetentionManager: Going to retain 1 images with txid >= 0 15/09/13 11:19:55 INFO util.ExitUtil: Exiting with status 0 15/09/13 11:19:55 INFO namenode.NameNode: SHUTDOWN_MSG: /************************************************************ SHUTDOWN_MSG: Shutting down NameNode at Demon.Lucifer/127.0.1.1 ************************************************************/
root@Demon:/opt/hadoop# ./sbin/start-all.sh This script is Deprecated. Instead use start-dfs.sh and start-yarn.sh Starting namenodes on [Demon] Demon: starting namenode, logging to /opt/hadoop/logs/hadoop-root-namenode-Demon.out localhost: starting datanode, logging to /opt/hadoop/logs/hadoop-root-datanode-Demon.out Starting secondary namenodes [Demon] Demon: starting secondarynamenode, logging to /opt/hadoop/logs/hadoop-root-secondarynamenode-Demon.out starting yarn daemons starting resourcemanager, logging to /opt/hadoop/logs/yarn-root-resourcemanager-Demon.out localhost: starting nodemanager, logging to /opt/hadoop/logs/yarn-root-nodemanager-Demon.out
root@Demon:/opt/hadoop# jps 27056 Jps 26805 ResourceManager 26614 SecondaryNameNode 26950 NodeManager 26247 NameNode 26395 DataNode
cd $HADOOP_HOME hadoop dfs -mkdir /input hadoop dfs -copyFromLocal /opt/hadoop/etc/hadoop/* /input hadoop jar hadoop-examples-1.2.0.jar wordcount /input /output