Hive connection pool with Spark specified as the execution engine

I. Configure the DruidDataSource properties in the .yml configuration file
The full list of available properties is documented in the official Druid GitHub repository.
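For reference, a minimal sketch of the relevant block in application.yml is shown below. The keys mirror the @Value bindings in the DruidConfiguration class of section II; the URL, credentials, and pool sizes are illustrative values, not recommendations.

# application.yml (sketch; values are illustrative)
hive:
  datasource:
    url: jdbc:hive2://your-hiveserver2-host:10000/default
    username: hive
    password: hive
    driverClassName: org.apache.hive.jdbc.HiveDriver
    initialSize: 1
    minIdle: 1
    maxActive: 20
    maxWait: 60000
    timeBetweenEvictionRunsMillis: 60000
    minEvictableIdleTimeMillis: 300000
    validationQuery: select 1
    testWhileIdle: true
    testOnBorrow: false
    testOnReturn: false
    poolPreparedStatements: false
    maxPoolPreparedStatementPerConnectionSize: 20
    # executed on every new physical connection, so Hive runs on the Spark engine
    connectionInitSqls: set hive.execution.engine=spark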
II. DruidConfiguration: the configuration class for DruidDataSource and JdbcTemplate
DruidConfiguration is loaded when the Spring container initializes. The relevant code is as follows:
import java.util.Arrays;

import javax.sql.DataSource;

import org.apache.commons.lang3.StringUtils; // assumed: the split below uses commons-lang3 StringUtils
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;

import com.alibaba.druid.pool.DruidDataSource;

@Configuration
public class DruidConfiguration {
    @Value("${hive.datasource.url}")
    private String dbUrl;
    @Value("${hive.datasource.username}")
    private String username;
    @Value("${hive.datasource.password}")
    private String password;
    @Value("${hive.datasource.driverClassName}")
    private String driverClassName;
    @Value("${hive.datasource.initialSize}")
    private int initialSize;
    @Value("${hive.datasource.minIdle}")
    private int minIdle;
    @Value("${hive.datasource.maxActive}")
    private int maxActive;
    @Value("${hive.datasource.maxWait}")
    private int maxWait;
    @Value("${hive.datasource.timeBetweenEvictionRunsMillis}")
    private int timeBetweenEvictionRunsMillis;
    @Value("${hive.datasource.minEvictableIdleTimeMillis}")
    private int minEvictableIdleTimeMillis;
    @Value("${hive.datasource.validationQuery}")
    private String validationQuery;
    @Value("${hive.datasource.testWhileIdle}")
    private boolean testWhileIdle;
    @Value("${hive.datasource.testOnBorrow}")
    private boolean testOnBorrow;
    @Value("${hive.datasource.testOnReturn}")
    private boolean testOnReturn;
    @Value("${hive.datasource.poolPreparedStatements}")
    private boolean poolPreparedStatements;
    @Value("${hive.datasource.maxPoolPreparedStatementPerConnectionSize}")
    private int maxPoolPreparedStatementPerConnectionSize;
    // Note: connectionInitSqls holds SQL executed when each pooled connection is created,
    // e.g. "set hive.execution.engine=spark", which makes Hive use Spark as its execution engine.
    @Value("${hive.datasource.connectionInitSqls}")
    private String connectionInitSqls;
    @Bean(name = "hiveJdbcDataSource")
    @Qualifier("hiveJdbcDataSource")
    public DataSource dataSource() {
        DruidDataSource datasource = new DruidDataSource();
        datasource.setUrl(this.dbUrl);
        datasource.setUsername(username);
        datasource.setPassword(password);
        datasource.setDriverClassName(driverClassName);
        //configuration
        datasource.setInitialSize(initialSize);
        datasource.setMinIdle(minIdle);
        datasource.setMaxActive(maxActive);
        datasource.setMaxWait(maxWait);
        datasource.setTimeBetweenEvictionRunsMillis(timeBetweenEvictionRunsMillis);
        datasource.setMinEvictableIdleTimeMillis(minEvictableIdleTimeMillis);
        datasource.setValidationQuery(validationQuery);
        datasource.setTestWhileIdle(testWhileIdle);
        datasource.setTestOnBorrow(testOnBorrow);
        datasource.setTestOnReturn(testOnReturn);
        datasource.setPoolPreparedStatements(poolPreparedStatements);
        datasource.setMaxPoolPreparedStatementPerConnectionSize(maxPoolPreparedStatementPerConnectionSize);
        String[] initSqls = StringUtils.split(connectionInitSqls,",");
        datasource.setConnectionInitSqls( Arrays.asList(initSqls));
        return datasource;
    }
    @Bean(name = "hiveJdbcTemplate")
    public JdbcTemplate hiveJdbcTemplate(@Qualifier("hiveJdbcDataSource") DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }
}
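Once the application context is up, one way to check that the init SQL took effect is to read the property back through the pooled connection. This is only a sketch and an assumption about driver behaviour: in Hive, "set hive.execution.engine" returns the current value as a one-row result, but the exact column layout depends on the Hive JDBC driver version.

// Sanity-check sketch: read the engine setting back through the pool.
List<Map<String, Object>> rows = hiveJdbcTemplate.queryForList("set hive.execution.engine");
rows.forEach(System.out::println); // expect something like hive.execution.engine=spark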

III. Running business SQL with JdbcTemplate

1. Inject the template in the service layer

@Autowired
private JdbcTemplate hiveJdbcTemplate;

2. Query example

String sql = "Select 1";
List<Map<String, Object>> mapList = hiveJdbcTemplate.queryForList(sql);
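
A slightly fuller sketch of a service method built on the same template; the database, table, and column names here are hypothetical:

// Hypothetical aggregation over a Hive table, returned as a list of column-name -> value maps.
public List<Map<String, Object>> topUsers(int limit) {
    String sql = "SELECT user_id, COUNT(*) AS cnt "
               + "FROM dw.user_events "
               + "GROUP BY user_id "
               + "ORDER BY cnt DESC "
               + "LIMIT " + limit;
    return hiveJdbcTemplate.queryForList(sql);
}

Each entry in the returned list is one row keyed by column name, which is convenient when the result schema is not fixed at compile time.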
