Note: these steps assume Java is already installed.
wget http://mirror.bit.edu.cn/apache/hadoop/common/hadoop-3.2.2/hadoop-3.2.2.tar.gz
mkdir -p /data/hadoop
tar -xzvf hadoop-3.2.2.tar.gz -C /data/hadoop/
sudo groupadd hadoop && sudo useradd -g hadoop hadoop && sudo passwd hadoop
cd /data/hadoop/
chown -R hadoop:hadoop hadoop-3.2.2
vim /etc/profile
Write the following into the profile:
## HADOOP env variables
export HADOOP_HOME=/data/hadoop/hadoop-3.2.2
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_YARN_HOME=$HADOOP_HOME
export HADOOP_OPTS="-Djava.library.path=$HADOOP_HOME/lib/native"
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export HIVE_HOME=/data/hadoop/apache-hive-3.1.2-bin
export SCALA_HOME=/data/hadoop/scala-2.11.8
export FLINK_HOME=/data/flink/flink
export PATH=$PATH:$HADOOP_HOME/sbin:$HADOOP_HOME/bin:$HIVE_HOME/bin:$SCALA_HOME/bin:$FLINK_HOME/bin
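Reload the profile so the variables take effect in the current shell. Hadoop 3 also reads JAVA_HOME from etc/hadoop/hadoop-env.sh, so set it there as well; the JDK path below is an assumption, substitute your actual installation:
source /etc/profile
# JDK path is an assumption; point it at your real installation
echo 'export JAVA_HOME=/usr/lib/jvm/java-8-openjdk' >> /data/hadoop/hadoop-3.2.2/etc/hadoop/hadoop-env.sh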
vim /data/hadoop/hadoop-3.2.2/etc/hadoop/yarn-site.xml
# Add the following inside <configuration>:
<property><name>yarn.resourcemanager.hostname</name><value>10.0.10.188</value></property>
<property><name>yarn.nodemanager.aux-services</name><value>mapreduce_shuffle</value></property>
<property><name>yarn.nodemanager.env-whitelist</name><value>JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,CLASSPATH_PREPEND_DISTCACHE,HADOOP_YARN_HOME,HADOOP_HOME,PATH,LANG,TZ,HADOOP_MAPRED_HOME</value></property>
vim /data/hadoop/hadoop-3.2.2/etc/hadoop/core-site.xml
# Add the following inside <configuration>:
<property><name>fs.defaultFS</name><value>hdfs://10.0.10.188:8020</value></property>
<property><name>hadoop.tmp.dir</name><value>/data/hadoop/hadoop-3.2.2/tmp</value></property>
<property><name>hadoop.native.lib</name><value>false</value><description>Should native hadoop libraries, if present, be used.</description></property>
<property><name>hadoop.proxyuser.hadoop.hosts</name><value>*</value></property>
<property><name>hadoop.proxyuser.hadoop.groups</name><value>*</value></property>
vim /data/hadoop/hadoop-3.2.2/etc/hadoop/hdfs-site.xml
# Add the following inside <configuration>:
<property><name>dfs.replication</name><value>1</value></property>
<property><name>dfs.namenode.secondary.http-address</name><value>10.0.10.188:50090</value></property>
# The mapreduce.* properties belong in mapred-site.xml, not hdfs-site.xml:
vim /data/hadoop/hadoop-3.2.2/etc/hadoop/mapred-site.xml
# Add the following inside <configuration>:
<property><name>mapreduce.framework.name</name><value>yarn</value></property>
<property><name>mapreduce.application.classpath</name><value>$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*:$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*</value></property>
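Before the first start, switch to the hadoop user, set up passwordless SSH to localhost (the start scripts connect over ssh), and format the NameNode once (repeating the format later wipes HDFS metadata):
su - hadoop
ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
chmod 600 ~/.ssh/authorized_keys
hdfs namenode -format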
cd /data/hadoop/hadoop-3.2.2/sbin
# Start
./start-all.sh
# Stop
./stop-all.sh
HDFS web UI: http://10.0.10.188:9870/
YARN web UI: http://10.0.10.188:8088/cluster
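To check that startup worked, jps run as the hadoop user should show the five single-node daemons:
jps
# Expect (PIDs vary): NameNode, DataNode, SecondaryNameNode, ResourceManager, NodeManager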
II. Install Hive
1. Download
wget http://mirror.bit.edu.cn/apache/hive/hive-3.1.2/apache-hive-3.1.2-bin.tar.gz
2. Extract
tar -xzvf apache-hive-3.1.2-bin.tar.gz -C /data/hadoop/
3. Change the owner and group
cd /data/hadoop/
chown -R hadoop:hadoop apache-hive-3.1.2-bin
4. Add environment variables
Already done back in step 1.5 when the environment variables were written to /etc/profile.
5. Put mysql-connector-java into Hive's lib directory
cp /usr/share/java/mysql-connector-java-5.1.48.jar /data/hadoop/apache-hive-3.1.2-bin/lib && cd /data/hadoop/apache-hive-3.1.2-bin/lib && sudo chown hadoop:hadoop mysql-connector-java-5.1.48.jar
Download a version of mysql-connector-java.jar that matches your MySQL server.
Link: https://pan.baidu.com/s/1DRmW9nylw3-0vbcvvVrczQ
Extraction code: 1234
6. Edit hive-site.xml; create the file if it does not exist
cd /data/hadoop/apache-hive-3.1.2-bin/conf
vim hive-site.xml
# Put the following into the file:
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
  <property><name>hive.metastore.warehouse.dir</name><value>/data/hadoop/apache-hive-3.1.2-bin/warehouse</value></property>
  <property><name>javax.jdo.option.ConnectionURL</name><value>jdbc:mysql://10.0.10.76:3306/hive_db?useSSL=false&amp;serverTimezone=Asia/Shanghai</value></property>
  <property><name>javax.jdo.option.ConnectionDriverName</name><value>com.mysql.jdbc.Driver</value></property>
  <property><name>javax.jdo.option.ConnectionUserName</name><value>root</value></property>
  <property><name>javax.jdo.option.ConnectionPassword</name><value>root</value></property>
  <property><name>hive.querylog.location</name><value>/data/hadoop/apache-hive-3.1.2-bin/log</value></property>
  <property><name>hive.server2.webui.host</name><value>10.0.10.188</value></property>
  <property><name>hive.server2.webui.port</name><value>10002</value></property>
  <property><name>hive.scratch.dir.permission</name><value>777</value></property>
</configuration>
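Hive expects its warehouse and scratch directories to exist in HDFS and be group-writable; a sketch using the warehouse path configured above (run as the hadoop user after HDFS is up):
hdfs dfs -mkdir -p /tmp /data/hadoop/apache-hive-3.1.2-bin/warehouse
hdfs dfs -chmod g+w /tmp /data/hadoop/apache-hive-3.1.2-bin/warehouse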
7. Set up the MySQL database
create database hive_db character set utf8;
grant all on hive_db.* to 'root'@'hive' identified by 'passwd';
flush privileges;
Configuring remote access differs between MySQL 5 and MySQL 8; if the GRANT above fails, look up the syntax for your version.
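On MySQL 8 the IDENTIFIED BY clause is no longer accepted inside GRANT, so create the user first; a sketch reusing the placeholder credentials above:
CREATE USER 'root'@'hive' IDENTIFIED BY 'passwd';
GRANT ALL ON hive_db.* TO 'root'@'hive';
FLUSH PRIVILEGES;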
8. Initialize the Hive metastore schema
cd /data/hadoop/apache-hive-3.1.2-bin
bin/schematool -dbType mysql -initSchema
If schematool reports a log4j conflict, copy the newer log4j jars from Hadoop into Hive's lib and delete the old versions.
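The usual conflict is Hive's bundled SLF4J binding; the exact jar name varies by version, so treat this as a sketch and check what is actually in lib:
# Remove Hive's own binding so Hadoop's is used
rm /data/hadoop/apache-hive-3.1.2-bin/lib/log4j-slf4j-impl-*.jar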
9. Hive's command history is stored in ~/.hivehistory
10. Start the hiveserver2 service
nohup hive --service hiveserver2 > /dev/null 2>&1 &
11. Open the web UI
http://10.0.10.188:10002/ (or http://hive:10002/ if the hostname hive is mapped in /etc/hosts)
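To confirm HiveServer2 is accepting connections, a beeline smoke test against the default Thrift port 10000 (the connecting user is an assumption):
beeline -u jdbc:hive2://10.0.10.188:10000 -n hadoop -e 'show databases;'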
III. Install Flink (standalone)
1. Download
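The original gives no download command; a sketch assuming Flink 1.12.2 built for Scala 2.11 from the Apache archive (version and mirror are assumptions; match the Scala build to SCALA_HOME above):
mkdir -p /data/flink && cd /data/flink
wget https://archive.apache.org/dist/flink/flink-1.12.2/flink-1.12.2-bin-scala_2.11.tgz
tar -xzvf flink-1.12.2-bin-scala_2.11.tgz
# Symlink so that FLINK_HOME=/data/flink/flink from the profile resolves
ln -s /data/flink/flink-1.12.2 /data/flink/flink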
2. Configure
vim /data/flink/flink/conf/flink-conf.yaml
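For a single node the defaults are mostly fine; a minimal sketch of the flink-conf.yaml keys worth checking (values assume this localhost setup):
jobmanager.rpc.address: localhost
taskmanager.numberOfTaskSlots: 1
rest.port: 8081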
3. Edit the masters file
vim /data/flink/flink/conf/masters
# Add the following:
localhost:8081
4. Edit the workers file
vim /data/flink/flink/conf/workers
# Add the following:
localhost
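5. Start the cluster and open the web UI (start-cluster.sh ships in Flink's bin directory):
cd /data/flink/flink
./bin/start-cluster.sh
# Web UI: http://localhost:8081/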