Packages used in this article:
apache-hive-2.3.4-bin.tar.gz
hadoop-2.9.2.tar.gz
Unless stated otherwise, commands on the host run from /opt and commands inside the container run from /usr/local.
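For reference, both packages can be fetched from the Apache archive (these mirror URLs are my assumption; any Apache mirror carrying these versions works):

cd /opt
wget https://archive.apache.org/dist/hadoop/common/hadoop-2.9.2/hadoop-2.9.2.tar.gz
wget https://archive.apache.org/dist/hive/hive-2.3.4/apache-hive-2.3.4-bin.tar.gz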
Create the sshd startup script run.sh:
#!/bin/bash
/usr/sbin/sshd -D
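The Dockerfile instructions below omit the header. A minimal sketch of what it needs to start with, assuming a CentOS 7 base image (the image is later tagged my_centos) and that sshd comes from the openssh-server package:

FROM centos:7
RUN yum -y install openssh-server openssh-clients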
#Copy the key files into /etc/ssh/; the keys must be generated as root
RUN mkdir -p /root/.ssh
#Generate the private/public key pair
RUN ssh-keygen -t rsa -b 2048 -P '' -f /root/.ssh/id_rsa
RUN cat /root/.ssh/id_rsa.pub > /root/.ssh/authorized_keys
RUN cp /root/.ssh/id_rsa /etc/ssh/ssh_host_rsa_key
RUN cp /root/.ssh/id_rsa.pub /etc/ssh/ssh_host_rsa_key.pub
# Install JRE 1.8
RUN yum -y install java-1.8.0-openjdk.x86_64
ENV JAVA_HOME=/etc/alternatives/jre_1.8.0
#Define the timezone variable
ENV TZ=Asia/Shanghai
#Set the timezone; "$TZ" must be double-quoted so the variable expands
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo "$TZ" > /etc/timezone
#Copy the sshd startup script into /usr/local/sbin and set its permissions to 755
ADD run.sh /usr/local/sbin/run.sh
RUN chmod 755 /usr/local/sbin/run.sh
#Change the root password to root
RUN echo "root:root" | chpasswd
#Expose the container's port 22; add or remove ports to suit your own needs
EXPOSE 22
EXPOSE 8080
EXPOSE 9000
EXPOSE 50070
EXPOSE 10000
EXPOSE 8088
EXPOSE 10002
#Run the script to start the sshd service
CMD ["/usr/local/sbin/run.sh"]
# Build the image; the trailing dot is the build context
docker build -t my_centos:v2 .
docker run -d -P --name myhive my_centos:v2 /usr/local/sbin/run.sh
docker exec -it myhive bash
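Since -P publishes each exposed port to a random high-numbered port on the host, you can look up the actual mappings at any time:

docker port myhive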
# On the host, copy the installation packages into the myhive container; the extraction steps inside the container follow right after
docker cp hadoop-2.9.2.tar.gz myhive:/usr/local
docker cp apache-hive-2.3.4-bin.tar.gz myhive:/usr/local
# Enter the container
docker exec -it myhive bash
# Extract the packages
tar xvf hadoop-2.9.2.tar.gz
tar xvf apache-hive-2.3.4-bin.tar.gz
Edit /usr/local/hadoop-2.9.2/etc/hadoop/core-site.xml and add inside <configuration>:

<property>
    <name>fs.defaultFS</name>
    <value>hdfs://127.0.0.1:9000</value>
</property>

Edit /usr/local/hadoop-2.9.2/etc/hadoop/hdfs-site.xml and add inside <configuration>:

<property>
    <name>dfs.replication</name>
    <value>1</value>
</property>
<property>
    <name>dfs.permissions</name>
    <value>false</value>
</property>
cp /usr/local/hadoop-2.9.2/etc/hadoop/mapred-site.xml.template /usr/local/hadoop-2.9.2/etc/hadoop/mapred-site.xml
vim /usr/local/hadoop-2.9.2/etc/hadoop/mapred-site.xml
# Add inside <configuration>:
<property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
</property>
# In /usr/local/hadoop-2.9.2/etc/hadoop/hadoop-env.sh, change the original export JAVA_HOME=${JAVA_HOME} to the following
export JAVA_HOME="/etc/alternatives/jre_1.8.0"
# Append the following to the end of /etc/profile, adding the Hive environment variables at the same time
export HADOOP_HOME="/usr/local/hadoop-2.9.2"
export HIVE_HOME="/usr/local/apache-hive-2.3.4-bin"
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$HIVE_HOME/bin
source /etc/profile
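A quick sanity check that the PATH changes took effect (both commands should print a version banner):

hadoop version
hive --version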
hadoop namenode -format
# If the command prompts for confirmation along the way, just type yes
start-dfs.sh
# ps -ef | grep hadoop should show three processes: NameNode, SecondaryNameNode and DataNode. If any is missing, HDFS probably did not start properly.
start-yarn.sh
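Another way to check the daemons is jps. Note that the Dockerfile above installs only the JRE; jps ships with the JDK, so this assumes yum -y install java-1.8.0-openjdk-devel has been run as well. After start-dfs.sh and start-yarn.sh it should list NameNode, DataNode, SecondaryNameNode, ResourceManager and NodeManager:

jps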
# Create a directory
hadoop fs -mkdir /test
# Check that the directory is there
hadoop fs -ls /
# Expected output:
Found 1 items
drwxr-xr-x - root supergroup 0 2019-05-10 16:37 /test
Before starting Hive for the first time, replace two placeholders in hive-site.xml, or Hive aborts with the error shown below:

${system:java.io.tmpdir}  ->  /tmp/hive/java
${system:user.name}       ->  ${user.name}
Exception in thread "main" java.lang.IllegalArgumentException: java.net.URISyntaxException: Relative path in absolute URI: ${system:java.io.tmpdir%7D/$%7Bsystem:user.name%7D
at org.apache.hadoop.fs.Path.initialize(Path.java:205)
at org.apache.hadoop.fs.Path.<init>(Path.java:171)
at org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:644)
at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:563)
at org.apache.hadoop.hive.ql.session.SessionState.beginStart(SessionState.java:531)
at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:705)
at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:641)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
Caused by: java.net.URISyntaxException: Relative path in absolute URI: ${system:java.io.tmpdir%7D/$%7Bsystem:user.name%7D
at java.net.URI.checkPath(URI.java:1823)
at java.net.URI.<init>(URI.java:745)
at org.apache.hadoop.fs.Path.initialize(Path.java:202)
... 12 more
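The replacement can also be scripted. A minimal sketch, assuming hive-site.xml was created by copying hive-default.xml.template (paths follow this article's layout):

cd /usr/local/apache-hive-2.3.4-bin/conf
cp hive-default.xml.template hive-site.xml
sed -i 's#\${system:java.io.tmpdir}#/tmp/hive/java#g' hive-site.xml
sed -i 's#\${system:user.name}#${user.name}#g' hive-site.xml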
# For now, use Hive's built-in Derby as the metastore database
schematool -initSchema -dbType derby
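One Derby caveat (assuming the default embedded ConnectionURL): the metastore_db directory is created in the current working directory, so start the hive CLI from the same directory where schematool ran:

cd /usr/local   # or wherever schematool was executed
hive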
Create the test data file /usr/local/test.txt with the following contents:
1,jack
2,hel
3,nack
# Create a table
create table test(id int, name string) row format delimited fields terminated by ',';
# Load the data
load data local inpath '/usr/local/test.txt' into table test;
# Query the rows we just loaded
select * from test;
# Query result:
OK
1 jack
2 hel
3 nack
Next, switch the metastore from the embedded Derby to MySQL. Set the connection properties in hive-site.xml:

<property>
    <name>javax.jdo.option.ConnectionUserName</name>
    <value>hive</value>
</property>
<property>
    <name>javax.jdo.option.ConnectionPassword</name>
    <value>@Admin123</value>
</property>
<property>
    <name>javax.jdo.option.ConnectionURL</name>
    <value>jdbc:mysql://172.17.0.3:3306/hive?useSSL=false</value>
</property>
<property>
    <name>javax.jdo.option.ConnectionDriverName</name>
    <value>com.mysql.jdbc.Driver</value>
</property>
The main things to change are the database URL, driver, username and password. Remember to point the URL at your own MySQL instance; mine runs in another Docker container, so I use that container's IP directly.
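For reference, a minimal sketch of preparing such a MySQL container and the hive account. The image tag, root password and grant scope are assumptions, not from the original setup:

docker run -d --name mysql -e MYSQL_ROOT_PASSWORD=root mysql:5.7
docker exec -it mysql mysql -uroot -proot -e "CREATE DATABASE hive; CREATE USER 'hive'@'%' IDENTIFIED BY '@Admin123'; GRANT ALL PRIVILEGES ON hive.* TO 'hive'@'%'; FLUSH PRIVILEGES;"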
# Download the MySQL JDBC driver into Hive's lib directory
cd /usr/local/apache-hive-2.3.4-bin/lib
wget http://central.maven.org/maven2/mysql/mysql-connector-java/5.1.47/mysql-connector-java-5.1.47.jar
schematool -initSchema -dbType mysql
Error: Duplicate key name 'PCS_STATS_IDX' (state=42000,code=1061)
org.apache.hadoop.hive.metastore.HiveMetaException: Schema initialization FAILED! Metastore state would be inconsistent !!
Underlying cause: java.io.IOException : Schema script failed, errorcode 2
Use --verbose for detailed stacktrace.
*** schemaTool failed ***
# The duplicate-key error means the hive database already contains tables from an earlier, partial initialization. Enter the mysql container to reset it
docker exec -it mysql bash
# Log in to mysql
mysql -uhive -p
# Drop the hive database
drop database hive;
# Recreate it
create database hive;
# Back in the myhive container
docker exec -it myhive bash
# Re-initialize the metastore schema
schematool -initSchema -dbType mysql
Starting metastore schema initialization to 2.3.0
Initialization script hive-schema-2.3.0.mysql.sql
Initialization script completed
schemaTool completed
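Optionally, verify from the MySQL side that the metastore tables were created (same hive account as above):

docker exec -it mysql mysql -uhive -p -e 'use hive; show tables;'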
To let HiveServer2 accept connections as root through beeline, add the proxy-user settings to /usr/local/hadoop-2.9.2/etc/hadoop/core-site.xml and restart HDFS:

<property>
    <name>hadoop.proxyuser.root.hosts</name>
    <value>*</value>
</property>
<property>
    <name>hadoop.proxyuser.root.groups</name>
    <value>*</value>
</property>
stop-dfs.sh
start-dfs.sh
nohup hiveserver2 &
# Connect via beeline
beeline -u jdbc:hive2://127.0.0.1:10000
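If the connection is rejected for the anonymous user (this depends on your authentication and impersonation settings), pass the user explicitly with beeline's -n flag:

beeline -u jdbc:hive2://127.0.0.1:10000 -n root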
# Query the test table over the new connection
select * from test;