Installing Hive with Docker

Pull the base image

docker pull debian:9
docker run -itd debian:9 /bin/bash              # create and start the container in the background
docker exec -it e218edb10161 /bin/bash          # enter it (replace with your container ID)
docker commit -m "has update" -a "study" e218edb10161 study/debian
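
If you want to reach the Hadoop and Hive service ports from the host later, it can be convenient to run the committed image with a hostname and some published ports. A minimal sketch (the container name hive-study and the port list are assumptions, not part of the original steps):

# optional: run the committed image with the HDFS web UI (9870),
# YARN web UI (8088) and HiveServer2 (10000) published to the host
docker run -itd -h hadoop --name hive-study \
    -p 9870:9870 -p 8088:8088 -p 10000:10000 \
    study/debian /bin/bash
docker exec -it hive-study /bin/bash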

Switch the apt mirror

cp /etc/apt/sources.list /etc/apt/sources.list.bak ; # back up first
echo " " > /etc/apt/sources.list ; # empty the file
# debian:9 is stretch, so use the stretch repositories
echo "deb http://mirrors.aliyun.com/debian stretch main" >> /etc/apt/sources.list ;
echo "deb http://mirrors.aliyun.com/debian stretch-updates main" >> /etc/apt/sources.list ;

apt-get clean;  # clear the package cache
apt-get update;  # refresh the package lists
apt-get -y install vim; # -y answers the prompts automatically
apt-get -y install procps # provides ps
apt-get install -y wget

Install SSH and set up passwordless login

apt-get install ssh
apt-get install pdsh
/etc/init.d/ssh start
ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
chmod 0600 ~/.ssh/authorized_keys
# test
ssh localhost

Install and configure Java

# on the host: copy the JDK tarball into the container
docker cp ~/Downloads/jdk-8u261-linux-x64.tar.gz ba83e1829124:/home
# inside the container:
apt remove openjdk*  # remove any OpenJDK packages
cd /home
tar -zxvf jdk-8u261-linux-x64.tar.gz
mv jdk1.8.0_261/ /usr/local/java
vim /etc/profile
Add the following environment variables at the end of the file:
export JAVA_HOME=/usr/local/java
export JRE_HOME=$JAVA_HOME/jre
export CLASSPATH=.:$JAVA_HOME/lib:$JRE_HOME/lib:$CLASSPATH
export PATH=$JAVA_HOME/bin:$JRE_HOME/bin:$PATH
Save and exit, then apply the changes:
source /etc/profile
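
To verify that the JDK is picked up:

echo $JAVA_HOME    # should print /usr/local/java
java -version      # should report java version "1.8.0_261"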

Install Hadoop

wget https://mirror.bit.edu.cn/apache/hadoop/common/hadoop-3.2.1/hadoop-3.2.1.tar.gz
tar -zxf hadoop-3.2.1.tar.gz 
mv hadoop-3.2.1 /usr/local/hadoop
vim /usr/local/hadoop/etc/hadoop/hadoop-env.sh
# Add the Java path at this point in the file:
# The java implementation to use. By default, this environment
# variable is REQUIRED on ALL platforms except OS X!
export JAVA_HOME=/usr/local/java

Configure Hadoop following the official single-node guide: https://hadoop.apache.org/docs/stable/hadoop-project-dist/hadoop-common/SingleCluster.html (a condensed sketch follows).
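
For reference, the pseudo-distributed setup from that guide boils down to roughly the following (a sketch of the official steps, using the paths above; the YARN side additionally needs the mapred-site.xml and yarn-site.xml properties listed in the guide):

# /usr/local/hadoop/etc/hadoop/core-site.xml
<configuration>
    <property>
        <name>fs.defaultFS</name>
        <value>hdfs://localhost:9000</value>
    </property>
</configuration>

# /usr/local/hadoop/etc/hadoop/hdfs-site.xml
<configuration>
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
</configuration>

# format the namenode, then start HDFS and YARN
cd /usr/local/hadoop
bin/hdfs namenode -format
sbin/start-dfs.sh
sbin/start-yarn.sh
jps   # should list NameNode, DataNode, SecondaryNameNode, ResourceManager, NodeManager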

Troubleshooting

Attempting to operate on hdfs namenode as root

# Add the following variables at the top of start-dfs.sh and stop-dfs.sh
#!/usr/bin/env bash
HDFS_DATANODE_USER=root
HADOOP_SECURE_DN_USER=hdfs
HDFS_NAMENODE_USER=root
HDFS_SECONDARYNAMENODE_USER=root
# Also add the following at the top of start-yarn.sh and stop-yarn.sh:
#!/usr/bin/env bash
YARN_RESOURCEMANAGER_USER=root
HADOOP_SECURE_DN_USER=yarn
YARN_NODEMANAGER_USER=root

pdsh localhost Connection refused

pdsh@xxx: localhost: connect: Connection refused
# Add to /etc/profile:
export PDSH_RCMD_TYPE=ssh

Install MariaDB

apt-get install -y mariadb-server        # server
apt-get install -y mariadb-client        # client
service mysql start

Download the JDBC driver: https://downloads.mariadb.org/connector-java/
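
hive-site.xml below connects to the database over TCP as root with a password, so the account must allow that. A minimal sketch of the preparation (the database name metastore and the password 'password' must match the hive-site.xml values further down; depending on the MariaDB packaging you may also need to adjust the root authentication plugin):

mysql -u root
-- inside the MariaDB shell:
CREATE DATABASE IF NOT EXISTS metastore;
SET PASSWORD FOR 'root'@'localhost' = PASSWORD('password');
FLUSH PRIVILEGES;
EXIT;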

Install Hive

tar -zxf apache-hive-3.1.2-bin.tar.gz
mv apache-hive-3.1.2-bin /usr/local/hive
docker cp Downloads/mariadb-java-client-2.6.2.jar 41602cd4c109:/usr/local/hive/lib

Edit the configuration files

vim /etc/profile
export HIVE_HOME=/usr/local/hive
export HIVE_CONF_DIR=${HIVE_HOME}/conf
source /etc/profile
# Before editing the config files, create some local directories under /opt (any directory works).
mkdir /opt/hive
mkdir /opt/hive/warehouse
# Create the same directories on HDFS and grant read/write permissions:
hadoop fs -mkdir -p /opt
hadoop fs -mkdir -p /opt/hive/
hadoop fs -mkdir -p /opt/hive/warehouse
hadoop fs -chmod 777 /opt/hive/
hadoop fs -chmod 777 /opt/hive/warehouse
# configure hive-env.sh
cd /usr/local/hive/conf
cp hive-env.sh.template hive-env.sh
# add the following to hive-env.sh:
export JAVA_HOME=/usr/local/java
export HADOOP_HOME=/usr/local/hadoop
export HIVE_HOME=/usr/local/hive
source hive-env.sh

Edit hive-site.xml

cp hive-default.xml.template hive-site.xml
vim hive-site.xml



Modify the following properties (they already exist in the copied template):

<property>
    <name>hive.metastore.warehouse.dir</name>
    <value>/opt/hive/warehouse</value>
</property>

<property>
    <name>hive.exec.scratchdir</name>
    <value>/opt/hive</value>
</property>

<!-- leave empty to use a local (embedded) metastore -->
<property>
    <name>hive.metastore.uris</name>
    <value></value>
</property>

<property>
    <name>javax.jdo.option.ConnectionURL</name>
    <value>jdbc:mysql://localhost:3306/metastore?createDatabaseIfNotExist=true&amp;useSSL=false</value>
    <description>
        JDBC connect string for a JDBC metastore.
        To use SSL to encrypt/authenticate the connection, provide database-specific SSL flag in the connection URL.
        For example, jdbc:postgresql://myhost/db?ssl=true for postgres database.
    </description>
</property>

<!-- driver class shipped in mariadb-java-client-2.6.2.jar -->
<property>
    <name>javax.jdo.option.ConnectionDriverName</name>
    <value>org.mariadb.jdbc.Driver</value>
</property>

<property>
    <name>javax.jdo.option.ConnectionUserName</name>
    <value>root</value>
</property>

<property>
    <name>javax.jdo.option.ConnectionPassword</name>
    <value>password</value>
</property>

<property>
    <name>hive.metastore.schema.verification</name>
    <value>false</value>
</property>

Then, throughout the config file, replace every occurrence of

${system:java.io.tmpdir} with /opt/tmp/hive (create the directory if it does not exist, and give it 777 permissions), and
${system:user.name} with root.
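
The same replacement can be done with sed instead of editing by hand (assuming hive-site.xml is in /usr/local/hive/conf):

mkdir -p /opt/tmp/hive && chmod 777 /opt/tmp/hive
cd /usr/local/hive/conf
sed -i 's#${system:java.io.tmpdir}#/opt/tmp/hive#g' hive-site.xml
sed -i 's#${system:user.name}#root#g' hive-site.xml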

Initialize the metastore database

# make sure MariaDB is running before initializing
cd /usr/local/hive/bin
./schematool -initSchema -dbType mysql
# enter hive:
./hive
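
Once the CLI comes up, a quick smoke test (the table name test is arbitrary):

hive> show databases;
hive> create table test(id int);
hive> show tables;
hive> drop table test;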

Common errors

1. java.lang.NoSuchMethodError: com.google.common.base.Preconditions.checkArgument
   Check the version of guava.jar under share/hadoop/common/lib in the Hadoop install directory
   and under lib in the Hive install directory. If the two differ, delete the lower version and
   copy in the higher one (see the sketch after this list).
2. hive-site.xml contains an illegal character at line 3186, column 96; delete the special character.
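
For error 1 the fix looks roughly like this (the exact jar versions are assumptions; check what your builds actually ship, which for Hadoop 3.2.1 / Hive 3.1.2 is typically guava 27 vs guava 19):

ls /usr/local/hadoop/share/hadoop/common/lib/guava*.jar
ls /usr/local/hive/lib/guava*.jar
# keep the newer jar: remove Hive's old guava and copy in Hadoop's
rm /usr/local/hive/lib/guava-19.0.jar
cp /usr/local/hadoop/share/hadoop/common/lib/guava-27.0-jre.jar /usr/local/hive/lib/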

Start Hive

# with a local metastore, start Hive directly with the hive command
./hive # equivalent to ./hive --service cli
# start the metastore service; the default port is 9083, or pass -p to choose another
./hive --service metastore -p 9083
# start hiveserver2
./hive --service hiveserver2
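
Once hiveserver2 is up, clients connect over JDBC; a quick check with the bundled beeline client (assuming the default port 10000 and the root account configured above):

./beeline -u jdbc:hive2://localhost:10000 -n root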
