Make MySQL start on boot:
systemctl enable mysqld
Unpack Hive and rename the directory:
tar -zxvf apache-hive-2.3.6-bin.tar.gz
mv apache-hive-2.3.6-bin hive
cd hive/conf
Add Hive to the environment, e.g. in /etc/profile (run source /etc/profile afterwards):
# Hive installation path
export HIVE_HOME=/usr/hive
# Hive configuration directory
export HIVE_CONF_DIR=/usr/hive/conf
export PATH=$PATH:$HIVE_HOME/bin
Back in hive/conf, create working copies of the configuration templates:
cp hive-env.sh.template hive-env.sh
cp hive-default.xml.template hive-site.xml
cp hive-log4j2.properties.template hive-log4j2.properties
cp hive-exec-log4j2.properties.template hive-exec-log4j2.properties
In hive-env.sh, set:
# Java installation path
export JAVA_HOME=/usr/java/jdk1.8.0_221-amd64
# Hadoop installation path
export HADOOP_HOME=/usr/hadoop/hadoop-2.9.2
# Hive installation path
export HIVE_HOME=/usr/hive
# Hive configuration directory
export HIVE_CONF_DIR=/usr/hive/conf
# Directory Hive scans for auxiliary (external) jars
export HIVE_AUX_JARS_PATH=/usr/hive/bin
A minimal hive-site.xml pointing the metastore at a local MySQL instance:
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<property>
<name>javax.jdo.option.ConnectionURL</name>
<!-- The MySQL database to connect to; the "hive" database is created when Hive is initialized (thanks to createDatabaseIfNotExist), and any other name works too -->
<value>jdbc:mysql://localhost:3306/hive?createDatabaseIfNotExist=true</value>
</property>
<property>
<name>javax.jdo.option.ConnectionDriverName</name>
<!-- JDBC driver class for MySQL -->
<value>com.mysql.jdbc.Driver</value>
</property>
<property>
<name>javax.jdo.option.ConnectionUserName</name>
<!-- MySQL user name -->
<value>root</value>
</property>
<property>
<name>javax.jdo.option.ConnectionPassword</name>
<!-- MySQL password -->
<value>root</value>
</property>
</configuration>
A fuller hive-site.xml, which also works around the "Relative path in absolute URI" startup errors, looks like this:
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<!-- Exception in thread "main" java.lang.IllegalArgumentException: java.net.URISyntaxException: Relative path in absolute URI -->
<property>
<name>hive.exec.scratchdir</name>
<value>/tmp</value>
</property>
<property>
<name>hive.exec.local.scratchdir</name>
<!-- The template default ${system:user.name} causes "Relative path in absolute URI: ${system:user.name}" -->
<!-- Drop the "system:" prefix -->
<value>/usr/hive/tmp/${user.name}</value>
<description>Local scratch space for Hive jobs</description>
</property>
<property>
<name>hive.downloaded.resources.dir</name>
<value>/usr/hive/tmp/${hive.session.id}_resources</value>
<description>Temporary local directory for added resources in the remote file system.</description>
</property>
<property>
<name>hive.metastore.warehouse.dir</name>
<value>/usr/hive/warehouse</value>
</property>
<property>
<name>hive.querylog.location</name>
<value>/usr/hive/logs/${user.name}</value>
</property>
<property>
<name>hive.server2.logging.operation.log.location</name>
<!-- The template default again uses ${system:user.name}; drop the "system:" prefix -->
<value>/usr/hive/logs/${user.name}/operation_logs</value>
<description>Top level directory where operation logs are stored if logging functionality is enabled</description>
</property>
<!-- MySQL metastore connection settings -->
<property>
<name>javax.jdo.option.ConnectionURL</name>
<value>jdbc:mysql://localhost:3306/hive?createDatabaseIfNotExist=true&amp;characterEncoding=UTF-8&amp;useSSL=false</value>
</property>
<property>
<name>javax.jdo.option.ConnectionDriverName</name>
<value>com.mysql.jdbc.Driver</value>
</property>
<property>
<name>javax.jdo.option.ConnectionUserName</name>
<value>root</value><!-- MySQL user name -->
</property>
<property>
<name>javax.jdo.option.ConnectionPassword</name>
<value>root</value><!-- MySQL password -->
</property>
</configuration>
Copy the MySQL JDBC driver jar (the connector that lets Java programs talk to MySQL, e.g. mysql-connector-java-5.x.jar) into hive/lib/, then initialize the metastore schema:
schematool -dbType mysql -initSchema
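If initialization succeeds, a quick smoke test from the Hive CLI confirms the metastore is working (a minimal sketch; the table name below is just an example):
-- run inside the hive CLI
show databases;
create table smoke_test(id int);
show tables;
drop table smoke_test;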
Dataset download: https://www.cocobolo.top/FileServer/prog-hive-1st-ed-data.zip
After unzipping, you get the two files this lab needs: stocks.csv and dividends.csv.
Create a managed table for the stock data (exchange is a reserved word in HiveQL, hence the backticks):
create table if not exists stocks
(
`exchange` string,
`symbol` string,
`ymd` string,
`price_open` float,
`price_high` float,
`price_low` float,
`price_close` float,
`volume` int,
`price_adj_close` float
)
row format delimited fields terminated by ',';
Create an external table for the dividend data, partitioned by exchange and symbol:
create external table if not exists dividends
(
`ymd` string,
`dividend` float
)
partitioned by(`exchange` string ,`symbol` string)
row format delimited fields terminated by ',';
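Partition columns are not stored in the data files, but they behave like ordinary columns in queries; describe lists them separately under "# Partition Information":
describe dividends;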
Load the stock data; the statement is as follows:
load data local inpath '/usr/data/stocks/stocks.csv' overwrite into table stocks;
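A quick sanity check that the load worked (exact row counts depend on your copy of the dataset):
select count(*) from stocks;
select * from stocks limit 5;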
Since dividends is partitioned, first stage the raw CSV in an unpartitioned table:
create external table if not exists dividends_unpartitioned
(
`exchange` string ,
`symbol` string,
`ymd` string,
`dividend` float
)
row format delimited fields terminated by ',';
Load the data:
load data local inpath '/usr/data/dividends/dividends.csv' overwrite into table dividends_unpartitioned;
Insert from the staging table into the partitioned table using dynamic partitioning; the statements are as follows:
-- allow dynamic partitioning
set hive.exec.dynamic.partition=true;
-- nonstrict: every partition column may be determined dynamically
set hive.exec.dynamic.partition.mode=nonstrict;
-- raise the per-node cap on partitions one statement may create
set hive.exec.max.dynamic.partitions.pernode=1000;
insert overwrite table dividends partition(`exchange`,`symbol`) select `ymd`,`dividend`,`exchange`,`symbol` from dividends_unpartitioned;
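To confirm the dynamic-partition insert did what was expected, for example:
show partitions dividends;
select count(*) from dividends;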
Closing prices for IBM on each day since 2000 on which it paid a dividend:
select s.ymd,s.symbol,s.price_close
from stocks s
LEFT SEMI JOIN
dividends d
ON s.ymd=d.ymd and s.symbol=d.symbol
where s.symbol='IBM' and year(s.ymd)>=2000;
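LEFT SEMI JOIN emits each left-side row at most once when a match exists. For comparison, a sketch of the same filter written as a correlated EXISTS subquery (supported since Hive 0.13):
select s.ymd, s.symbol, s.price_close
from stocks s
where s.symbol = 'IBM'
  and year(s.ymd) >= 2000
  -- keep only days that have a matching dividend record
  and exists (select 1
              from dividends d
              where d.ymd = s.ymd and d.symbol = s.symbol);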
Whether AAPL rose, fell, or was unchanged on each trading day in October 2008:
select ymd,
case
when price_close-price_open>0 then 'rise'
when price_close-price_open<0 then 'fall'
else 'unchanged'
end as situation
from stocks
where symbol='AAPL' and substring(ymd,1,7)='2008-10';
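Equivalently, the three-way branch can be driven by the built-in sign() function, which returns -1.0, 0.0, or 1.0; a compact variant:
select ymd,
       case cast(sign(price_close - price_open) as int)
         when 1  then 'rise'
         when -1 then 'fall'
         else 'unchanged'
       end as situation
from stocks
where symbol = 'AAPL' and substring(ymd, 1, 7) = '2008-10';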
The single record with the largest close-over-open gain across all stocks:
select `exchange`,symbol,ymd,price_close-price_open as `diff`
from
(
select *
from stocks
order by price_close-price_open desc
limit 1
)t;
Years in which AAPL's average adjusted close on NASDAQ exceeded 50:
select
year(ymd) as `year`,
avg(price_adj_close) as avg_price from stocks
where `exchange`='NASDAQ' and symbol='AAPL'
group by year(ymd)
having avg_price > 50;
For each year, the three symbols with the highest average adjusted close:
select t2.`year`,symbol,t2.avg_price
from
(
select
*,row_number() over(partition by t1.`year` order by t1.avg_price desc) as `rank`
from
(
select
year(ymd) as `year`,
symbol,
avg(price_adj_close) as avg_price
from stocks
group by year(ymd),symbol
)t1
)t2
where t2.`rank`<=3;
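Note that row_number() breaks ties arbitrarily, so if several symbols share the same yearly average only some survive the top-3 cut. A variant with dense_rank() that keeps all tied symbols (same structure, only the window function changes):
select t2.`year`, symbol, t2.avg_price
from
(
  -- dense_rank assigns equal averages the same rank
  select t1.*, dense_rank() over(partition by t1.`year` order by t1.avg_price desc) as `rank`
  from
  (
    select year(ymd) as `year`, symbol, avg(price_adj_close) as avg_price
    from stocks
    group by year(ymd), symbol
  ) t1
) t2
where t2.`rank` <= 3;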