Springboot2(37)集成hive

源码地址

springboot2教程系列

Hive基本命令

添加依赖

```xml
		<dependency>
			<groupId>org.apache.hive</groupId>
			<artifactId>hive-jdbc</artifactId>
			<version>${hive.version}</version>
			<exclusions>
				<exclusion>
					<groupId>org.eclipse.jetty.aggregate</groupId>
					<artifactId>*</artifactId>
				</exclusion>
				<exclusion>
					<groupId>org.slf4j</groupId>
					<artifactId>*</artifactId>
				</exclusion>
			</exclusions>
		</dependency>

		<dependency>
			<groupId>org.springframework.data</groupId>
			<artifactId>spring-data-hadoop</artifactId>
			<version>${spring-data-hadoop.version}</version>
			<exclusions>
				<exclusion>
					<groupId>javax.servlet</groupId>
					<artifactId>*</artifactId>
				</exclusion>
			</exclusions>
		</dependency>

		<dependency>
			<groupId>com.alibaba</groupId>
			<artifactId>druid-spring-boot-starter</artifactId>
			<version>1.1.1</version>
		</dependency>

		<dependency>
			<groupId>org.springframework.boot</groupId>
			<artifactId>spring-boot-starter-jdbc</artifactId>
		</dependency>

		<dependency>
			<groupId>org.apache.tomcat</groupId>
			<artifactId>tomcat-jdbc</artifactId>
		</dependency>
```

添加配置

# Hive JDBC data source settings, bound by HiveDruidConfig via prefix "hive".
hive:
  url: jdbc:hive2://10.10.1.141:10000/hive
  driver-class-name: org.apache.hive.jdbc.HiveDriver
  type: com.alibaba.druid.pool.DruidDataSource
  user: bi
  password: rojao123
  # Druid connection-pool tuning applied to the data source above.
  # Initial / minimum-idle / maximum-active pool sizes.
  initialSize: 1
  minIdle: 3
  maxActive: 20
  # Maximum time (ms) to wait for a connection before timing out.
  maxWait: 60000
  # Interval (ms) between eviction runs that detect and close idle connections.
  timeBetweenEvictionRunsMillis: 60000
  # Minimum time (ms) a connection must sit idle in the pool before it may be evicted.
  minEvictableIdleTimeMillis: 30000
  validationQuery: select 1
  testWhileIdle: true
  testOnBorrow: false
  testOnReturn: false
  # Enable the PreparedStatement cache (PSCache) and cap its per-connection size.
  poolPreparedStatements: true
  maxPoolPreparedStatementPerConnectionSize: 20

数据连接池配置

/**
 * Binds the {@code hive} section of the application configuration and exposes a
 * Druid-pooled {@link DataSource} plus a {@link JdbcTemplate} for issuing SQL
 * against HiveServer2.
 *
 * <p>Getters and setters for all bound fields are generated by Lombok's
 * {@code @Data}, which {@code @ConfigurationProperties} binding relies on.
 */
@Configuration
@ConfigurationProperties(prefix = "hive")
@Data
public class HiveDruidConfig {

    // JDBC connection settings (hive.url, hive.user, ...).
    private String url;
    private String user;
    private String password;
    private String driverClassName;
    // Druid pool sizing and eviction settings; see the "hive" YAML section.
    private int initialSize;
    private int minIdle;
    private int maxActive;
    private int maxWait;
    private int timeBetweenEvictionRunsMillis;
    private int minEvictableIdleTimeMillis;
    private String validationQuery;
    private boolean testWhileIdle;
    private boolean testOnBorrow;
    private boolean testOnReturn;
    private boolean poolPreparedStatements;
    private int maxPoolPreparedStatementPerConnectionSize;

    /**
     * Druid connection pool pointing at HiveServer2. The bean name also serves
     * as its qualifier, so no separate {@code @Qualifier} is needed on the
     * definition (injection points still use {@code @Qualifier("hiveDruidDataSource")}).
     *
     * @return the configured {@link DruidDataSource}
     */
    @Bean(name = "hiveDruidDataSource")
    public DataSource dataSource() {
        DruidDataSource datasource = new DruidDataSource();
        datasource.setUrl(url);
        // Druid's property is "username"; our bound YAML key is "user".
        datasource.setUsername(user);
        datasource.setPassword(password);
        datasource.setDriverClassName(driverClassName);

        // Connection-pool tuning, mirrored 1:1 from the bound configuration.
        datasource.setInitialSize(initialSize);
        datasource.setMinIdle(minIdle);
        datasource.setMaxActive(maxActive);
        datasource.setMaxWait(maxWait);
        datasource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
        datasource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
        datasource.setValidationQuery(validationQuery);
        datasource.setTestWhileIdle(testWhileIdle);
        datasource.setTestOnBorrow(testOnBorrow);
        datasource.setTestOnReturn(testOnReturn);
        datasource.setPoolPreparedStatements(poolPreparedStatements);
        datasource.setMaxPoolPreparedStatementPerConnectionSize(
            maxPoolPreparedStatementPerConnectionSize);
        return datasource;
    }

    /**
     * {@link JdbcTemplate} wired to the Hive pool. Inject it with
     * {@code @Qualifier("hiveDruidTemplate")} to avoid clashing with the
     * JdbcTemplate auto-configured by spring-boot-starter-jdbc.
     *
     * @param dataSource the Hive Druid data source defined above
     * @return a JdbcTemplate bound to that data source
     */
    @Bean(name = "hiveDruidTemplate")
    public JdbcTemplate hiveDruidTemplate(
        @Qualifier("hiveDruidDataSource") DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }

}

service类

/**
 * Hive-backed implementation of {@code BusReceiverService}: creates the
 * {@code bus_receiver} table at startup and provides load/insert/clear
 * operations via the Hive {@link JdbcTemplate}.
 */
@Service
public class BusReceiverServiceImp implements BusReceiverService {

    // Qualify the injection explicitly: spring-boot-starter-jdbc auto-configures
    // its own JdbcTemplate, and the field name "hiveJdbcTemplate" matches neither
    // candidate bean name, so injection by type alone is ambiguous.
    @Autowired
    @Qualifier("hiveDruidTemplate")
    JdbcTemplate hiveJdbcTemplate;

    /**
     * Creates the {@code bus_receiver} table (if absent) as a tab-delimited
     * text-file table. Runs once at startup via {@code @PostConstruct}.
     */
    @Override
    @PostConstruct
    public void createTable() {
        // StringBuilder suffices here: the buffer is method-local, so the
        // synchronization of StringBuffer buys nothing.
        StringBuilder sql = new StringBuilder("create table IF NOT EXISTS ");
        sql.append("bus_receiver ");
        sql.append("(id BIGINT comment '主键ID' " +
                ",name STRING  comment '姓名' " +
                ",address STRING comment '地址'" +
                ",en_name STRING comment '拼音名字'" +
                ",member_family INT comment '家庭成员'" +
                ",createDate DATE comment '创建时') ");
        sql.append(" ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'"); // field delimiter
        sql.append(" STORED AS TEXTFILE"); // store as plain text
        hiveJdbcTemplate.execute(sql.toString());
    }

    /**
     * Loads an HDFS file into {@code bus_receiver}.
     *
     * <p>NOTE(review): the path is concatenated directly into the HiveQL
     * statement, so {@code pathFile} must come from trusted input only —
     * LOAD DATA does not accept JDBC bind parameters for the path.
     *
     * @param pathFile HDFS path of the delimited data file to load
     */
    public void loadData(String pathFile){
        String sql = "LOAD DATA INPATH  '"+pathFile+"' INTO TABLE bus_receiver";
        hiveJdbcTemplate.execute(sql);
    }

    /**
     * Inserts a single receiver row using bind parameters.
     *
     * @param busReceiverEntity the row to insert; its id, name, address,
     *        enName and memberFamily fields are persisted
     */
    @Override
    public void insert(BusReceiverEntity busReceiverEntity) {
        // PreparedStatementSetter is a functional interface, so a lambda
        // replaces the anonymous class.
        hiveJdbcTemplate.update("insert into bus_receiver(id,name,address,en_name,member_family) values(?,?,?,?,?)",
            ps -> {
                ps.setLong(1, busReceiverEntity.getId());
                ps.setString(2, busReceiverEntity.getName());
                ps.setString(3, busReceiverEntity.getAddress());
                ps.setString(4, busReceiverEntity.getEnName());
                ps.setInt(5, busReceiverEntity.getMemberFamily());
            }
        );
    }

    /**
     * Empties {@code bus_receiver} by overwriting it with a zero-row select —
     * the usual idiom for non-ACID Hive tables, which do not support DELETE.
     */
    public void deleteAll(){
        String sql = "insert overwrite table bus_receiver select * from bus_receiver where 1=0";
        hiveJdbcTemplate.execute(sql);
    }
}

hive需要先启动HiveServer2服务端进程: hive --service hiveserver2 (java客户端即可通过JDBC连接此hive服务)

你可能感兴趣的:(springboot2,大数据,springboot2)