Switch to root, create a dedicated hadoop user, and open the sudoers file:
su root
adduser hadoop
visudo
visudo opens the /etc/sudoers file. Below the line
root ALL=(ALL:ALL) ALL
add this line:
hadoop ALL=(ALL:ALL) ALL
Press Ctrl+O (then Enter) to save, Ctrl+C to cancel, and Ctrl+X to exit (these are the nano key bindings; visudo opens nano by default on Ubuntu).
exit
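To confirm the new account can actually use sudo, switch to it and run a harmless privileged command (a quick sanity check, not part of the original steps):
su hadoop
sudo whoami    # should print: root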
Update the package index and install the SSH server (Hadoop's control scripts log in over ssh even on a single node):
sudo apt-get update
sudo apt-get install openssh-server
Log in to localhost once (this also creates the ~/.ssh directory), then log out and generate a key pair, pressing Enter at each prompt to accept the defaults:
ssh localhost
exit
cd ~/.ssh/
ssh-keygen -t rsa
cat id_rsa.pub >> authorized_keys
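Passwordless login should now work. sshd is strict about file permissions, so tightening the mode first is a safe habit (the chmod is an addition to the original steps):
chmod 600 ./authorized_keys
ssh localhost    # should log in without asking for a password
exit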
Run the following command to install vim:
sudo apt-get install vim
sudo apt-get install openjdk-8-jdk
java -version
Open ~/.bashrc and append the following lines at the end:
gedit ~/.bashrc
export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
export JRE_HOME=${JAVA_HOME}/jre
export CLASSPATH=.:${JAVA_HOME}/lib:${JRE_HOME}/lib
export PATH=${JAVA_HOME}/bin:$PATH
Reload the configuration and verify that the variables took effect:
source ~/.bashrc
echo $JAVA_HOME
java -version
whereis java
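If the JDK path differs on your machine, these standard Ubuntu commands print the real location behind the java symlink; use the result (minus the trailing /bin/java or /jre/bin/java) as JAVA_HOME:
update-alternatives --list java
readlink -f $(which java)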
Hadoop download page: https://hadoop.apache.org/releases.html (can be slow)
A faster mirror with all versions available: http://archive.apache.org/dist/hadoop/core/hadoop-3.2.4/
sudo tar -zxf ~/Downloads/hadoop-3.2.4.tar.gz -C /usr/local    # the folder is ~/下载 on a Chinese-locale system
cd /usr/local/
sudo mv ./hadoop-3.2.4/ ./hadoop
sudo chown -R hadoop ./hadoop
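A quick check that the rename and ownership change worked:
ls -ld /usr/local/hadoop    # the owner column should now read hadoop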
Open ~/.bashrc again and append the Hadoop variables:
gedit ~/.bashrc
#HADOOP VARIABLES START
export HADOOP_INSTALL=/usr/local/hadoop
export PATH=$PATH:$HADOOP_INSTALL/bin
export PATH=$PATH:$HADOOP_INSTALL/sbin
export HADOOP_MAPRED_HOME=$HADOOP_INSTALL
export HADOOP_COMMON_HOME=$HADOOP_INSTALL
export HADOOP_HDFS_HOME=$HADOOP_INSTALL
export YARN_HOME=$HADOOP_INSTALL
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_INSTALL/lib/native
export HADOOP_OPTS="-Djava.library.path=$HADOOP_INSTALL/lib"
#HADOOP VARIABLES END
source ~/.bashrc
hadoop version
If the Hadoop version information is printed, Hadoop is installed.
The configuration paths below are relative to the Hadoop directory, so change into it first:
cd /usr/local/hadoop
gedit ./etc/hadoop/hadoop-env.sh
# The java implementation to use.
export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
export HADOOP_HOME=/usr/local/hadoop
export PATH=$PATH:/usr/local/hadoop/bin
gedit ./etc/hadoop/yarn-env.sh
# export JAVA_HOME
export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
gedit ./etc/hadoop/core-site.xml
<configuration>
    <property>
        <name>hadoop.tmp.dir</name>
        <value>file:/usr/local/hadoop/tmp</value>
        <description>A base for other temporary directories.</description>
    </property>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://localhost:9000</value>
    </property>
</configuration>
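hadoop.tmp.dir is created on demand, but making it up front as the hadoop user avoids permission surprises; this step is an optional addition:
mkdir -p /usr/local/hadoop/tmp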
gedit ./etc/hadoop/hdfs-site.xml
<configuration>
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
    <property>
        <name>dfs.namenode.name.dir</name>
        <value>file:/usr/local/hadoop/tmp/dfs/name</value>
    </property>
    <property>
        <name>dfs.datanode.data.dir</name>
        <value>file:/usr/local/hadoop/tmp/dfs/data</value>
    </property>
</configuration>
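dfs.replication is 1 because a pseudo-distributed cluster has only one DataNode. Once the environment variables are loaded, the effective value can be confirmed with (an optional check):
hdfs getconf -confKey dfs.replication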
gedit ./etc/hadoop/yarn-site.xml
<configuration>
    <!-- Site specific YARN configuration properties -->
    <property>
        <name>yarn.nodemanager.aux-services</name>
        <value>mapreduce_shuffle</value>
    </property>
    <property>
        <name>yarn.nodemanager.aux-services.mapreduce.shuffle.class</name>
        <value>org.apache.hadoop.mapred.ShuffleHandler</value>
    </property>
    <property>
        <name>yarn.resourcemanager.address</name>
        <value>127.0.0.1:8032</value>
    </property>
    <property>
        <name>yarn.resourcemanager.scheduler.address</name>
        <value>127.0.0.1:8030</value>
    </property>
    <property>
        <name>yarn.resourcemanager.resource-tracker.address</name>
        <value>127.0.0.1:8031</value>
    </property>
</configuration>
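To actually run MapReduce jobs on YARN, many pseudo-distributed setups also point the MapReduce framework at YARN in ./etc/hadoop/mapred-site.xml; this file is an optional addition, not part of the original steps:
gedit ./etc/hadoop/mapred-site.xml
<configuration>
    <property>
        <name>mapreduce.framework.name</name>
        <value>yarn</value>
    </property>
</configuration>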
hadoop version
hdfs namenode -format    # format only once; reformatting erases all HDFS metadata
start-all.sh    # deprecated but still works; equivalent to start-dfs.sh followed by start-yarn.sh
jps
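With everything running, jps should list six processes (the process IDs will differ):
NameNode
DataNode
SecondaryNameNode
ResourceManager
NodeManager
Jps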
Open http://localhost:9870/ in a browser to see the HDFS NameNode web UI (port 9870 in Hadoop 3.x; it was 50070 in 2.x).
Open http://localhost:8088/ to see the YARN ResourceManager web UI.
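A minimal smoke test, assuming the daemons are up: create a home directory in HDFS, upload a config file, and list it:
hdfs dfs -mkdir -p /user/hadoop
hdfs dfs -put ./etc/hadoop/core-site.xml /user/hadoop
hdfs dfs -ls /user/hadoop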