org.apache.hadoop
hadoop-common
2.6.0
org.apache.hadoop
hadoop-mapreduce-client-core
2.6.0
org.apache.hadoop
hadoop-mapreduce-client-common
2.6.0
org.apache.hadoop
hadoop-hdfs
2.6.0
jdk.tools
jdk.tools
1.8
system
${JAVA_HOME}/lib/tools.jar
org.springframework.boot
spring-boot-configuration-processor
true
org.apache.hive
hive-jdbc
2.1.1
org.eclipse.jetty.aggregate
*
#配置多个数据源(这里测试hive数据源)
spring:
datasource:
hive: #hive数据源
url: jdbc:hive2://47.100.200.200:9019/default
type: com.alibaba.druid.pool.DruidDataSource
username: sixmonth
password: sixmonth
driver-class-name: org.apache.hive.jdbc.HiveDriver
commonConfig: #连接池统一配置,应用到所有的数据源
initialSize: 1
minIdle: 1
maxIdle: 5
maxActive: 50
maxWait: 10000
timeBetweenEvictionRunsMillis: 10000
minEvictableIdleTimeMillis: 300000
validationQuery: select 'x'
testWhileIdle: true
testOnBorrow: false
testOnReturn: false
poolPreparedStatements: true
maxOpenPreparedStatements: 20
filters: stat
package com.springboot.sixmonth.common.config.druid;
import java.util.Map;
import org.springframework.boot.context.properties.ConfigurationProperties;
/**
 * Unified property holder bound from the application configuration
 * (prefix {@code spring.datasource}). Each map carries the raw settings
 * of one data source (e.g. url, username, password, driver-class-name).
 *
 * @author sixmonth
 * @Date May 18, 2019
 *
 */
@ConfigurationProperties(prefix = DataSourceProperties.DS, ignoreUnknownFields = false)
public class DataSourceProperties {

	final static String DS = "spring.datasource";

	// Hive data-source settings from spring.datasource.hive.*
	private Map<String, String> hive;

	// Shared pool settings from spring.datasource.commonConfig.*
	private Map<String, String> commonConfig;

	// Getters/setters are required for Spring Boot property binding
	// (the original placeholder comment omitted them).
	public Map<String, String> getHive() {
		return hive;
	}

	public void setHive(Map<String, String> hive) {
		this.hive = hive;
	}

	public Map<String, String> getCommonConfig() {
		return commonConfig;
	}

	public void setCommonConfig(Map<String, String> commonConfig) {
		this.commonConfig = commonConfig;
	}
}
package com.springboot.sixmonth.common.config.druid;
import org.springframework.boot.context.properties.ConfigurationProperties;
/**
 * Shared Druid connection-pool settings bound from
 * {@code spring.datasource.commonConfig}; applied to every data source.
 *
 * @author sixmonth
 * @Date May 18, 2019
 *
 */
@ConfigurationProperties(prefix = DataSourceCommonProperties.DS, ignoreUnknownFields = false)
public class DataSourceCommonProperties {

	final static String DS = "spring.datasource.commonConfig";

	// Pool sizing; initialSize falls back to 10 when not configured.
	private int initialSize = 10;
	private int minIdle;
	private int maxIdle;
	private int maxActive;
	private int maxWait;
	private int timeBetweenEvictionRunsMillis;
	private int minEvictableIdleTimeMillis;
	private String validationQuery;
	private boolean testWhileIdle;
	private boolean testOnBorrow;
	private boolean testOnReturn;
	private boolean poolPreparedStatements;
	private int maxOpenPreparedStatements;
	private String filters;
	private String mapperLocations;
	private String typeAliasPackage;

	// Accessors are required both for Spring Boot property binding and for
	// the pool configuration in HiveDruidConfig (the original placeholder
	// comment omitted them).
	public int getInitialSize() {
		return initialSize;
	}

	public void setInitialSize(int initialSize) {
		this.initialSize = initialSize;
	}

	public int getMinIdle() {
		return minIdle;
	}

	public void setMinIdle(int minIdle) {
		this.minIdle = minIdle;
	}

	public int getMaxIdle() {
		return maxIdle;
	}

	public void setMaxIdle(int maxIdle) {
		this.maxIdle = maxIdle;
	}

	public int getMaxActive() {
		return maxActive;
	}

	public void setMaxActive(int maxActive) {
		this.maxActive = maxActive;
	}

	public int getMaxWait() {
		return maxWait;
	}

	public void setMaxWait(int maxWait) {
		this.maxWait = maxWait;
	}

	public int getTimeBetweenEvictionRunsMillis() {
		return timeBetweenEvictionRunsMillis;
	}

	public void setTimeBetweenEvictionRunsMillis(int timeBetweenEvictionRunsMillis) {
		this.timeBetweenEvictionRunsMillis = timeBetweenEvictionRunsMillis;
	}

	public int getMinEvictableIdleTimeMillis() {
		return minEvictableIdleTimeMillis;
	}

	public void setMinEvictableIdleTimeMillis(int minEvictableIdleTimeMillis) {
		this.minEvictableIdleTimeMillis = minEvictableIdleTimeMillis;
	}

	public String getValidationQuery() {
		return validationQuery;
	}

	public void setValidationQuery(String validationQuery) {
		this.validationQuery = validationQuery;
	}

	public boolean isTestWhileIdle() {
		return testWhileIdle;
	}

	public void setTestWhileIdle(boolean testWhileIdle) {
		this.testWhileIdle = testWhileIdle;
	}

	public boolean isTestOnBorrow() {
		return testOnBorrow;
	}

	public void setTestOnBorrow(boolean testOnBorrow) {
		this.testOnBorrow = testOnBorrow;
	}

	public boolean isTestOnReturn() {
		return testOnReturn;
	}

	public void setTestOnReturn(boolean testOnReturn) {
		this.testOnReturn = testOnReturn;
	}

	public boolean isPoolPreparedStatements() {
		return poolPreparedStatements;
	}

	public void setPoolPreparedStatements(boolean poolPreparedStatements) {
		this.poolPreparedStatements = poolPreparedStatements;
	}

	public int getMaxOpenPreparedStatements() {
		return maxOpenPreparedStatements;
	}

	public void setMaxOpenPreparedStatements(int maxOpenPreparedStatements) {
		this.maxOpenPreparedStatements = maxOpenPreparedStatements;
	}

	public String getFilters() {
		return filters;
	}

	public void setFilters(String filters) {
		this.filters = filters;
	}

	public String getMapperLocations() {
		return mapperLocations;
	}

	public void setMapperLocations(String mapperLocations) {
		this.mapperLocations = mapperLocations;
	}

	public String getTypeAliasPackage() {
		return typeAliasPackage;
	}

	public void setTypeAliasPackage(String typeAliasPackage) {
		this.typeAliasPackage = typeAliasPackage;
	}
}
package com.springboot.sixmonth.common.config.druid.hive;
import java.sql.SQLException;
import javax.sql.DataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import com.alibaba.druid.pool.DruidDataSource;
import com.springboot.sixmonth.common.config.druid.DataSourceCommonProperties;
import com.springboot.sixmonth.common.config.druid.DataSourceProperties;
/**
 * Builds the Druid connection pool for the Hive data source.
 *
 * @author sixmonth
 * @Date May 18, 2019
 *
 */
@Configuration
// Register the @ConfigurationProperties classes as beans so binding takes effect.
@EnableConfigurationProperties({DataSourceProperties.class, DataSourceCommonProperties.class})
public class HiveDruidConfig {

	private static final Logger logger = LoggerFactory.getLogger(HiveDruidConfig.class);

	private final DataSourceProperties dataSourceProperties;
	private final DataSourceCommonProperties dataSourceCommonProperties;

	// Constructor injection (preferred over field injection): dependencies are
	// explicit and the fields can be final.
	@Autowired
	public HiveDruidConfig(DataSourceProperties dataSourceProperties,
			DataSourceCommonProperties dataSourceCommonProperties) {
		this.dataSourceProperties = dataSourceProperties;
		this.dataSourceCommonProperties = dataSourceCommonProperties;
	}

	/**
	 * Creates the Hive {@link DataSource} backed by a Druid pool.
	 *
	 * @return the configured pool, exposed as bean "hiveDruidDataSource"
	 */
	@Bean("hiveDruidDataSource")
	@Qualifier("hiveDruidDataSource")
	public DataSource dataSource(){
		DruidDataSource datasource = new DruidDataSource();
		// Per-data-source connection settings (spring.datasource.hive.*).
		datasource.setUrl(dataSourceProperties.getHive().get("url"));
		datasource.setUsername(dataSourceProperties.getHive().get("username"));
		datasource.setPassword(dataSourceProperties.getHive().get("password"));
		datasource.setDriverClassName(dataSourceProperties.getHive().get("driver-class-name"));
		// Shared pool settings (spring.datasource.commonConfig.*).
		datasource.setInitialSize(dataSourceCommonProperties.getInitialSize());
		datasource.setMinIdle(dataSourceCommonProperties.getMinIdle());
		datasource.setMaxActive(dataSourceCommonProperties.getMaxActive());
		datasource.setMaxWait(dataSourceCommonProperties.getMaxWait());
		datasource.setTimeBetweenEvictionRunsMillis(dataSourceCommonProperties.getTimeBetweenEvictionRunsMillis());
		datasource.setMinEvictableIdleTimeMillis(dataSourceCommonProperties.getMinEvictableIdleTimeMillis());
		datasource.setValidationQuery(dataSourceCommonProperties.getValidationQuery());
		datasource.setTestWhileIdle(dataSourceCommonProperties.isTestWhileIdle());
		datasource.setTestOnBorrow(dataSourceCommonProperties.isTestOnBorrow());
		datasource.setTestOnReturn(dataSourceCommonProperties.isTestOnReturn());
		datasource.setPoolPreparedStatements(dataSourceCommonProperties.isPoolPreparedStatements());
		// Fix: this value was bound from config but never applied to the pool.
		datasource.setMaxOpenPreparedStatements(dataSourceCommonProperties.getMaxOpenPreparedStatements());
		try {
			datasource.setFilters(dataSourceCommonProperties.getFilters());
		} catch (SQLException e) {
			// An unknown filter alias is a configuration problem; the pool still
			// works without filters, so log and continue (original behavior).
			logger.error("Druid configuration initialization filter error.", e);
		}
		return datasource;
	}
}
package com.springboot.sixmonth.common.config.jdbcConfig;
import javax.sql.DataSource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Repository;
/**
 * Base DAO that binds the Hive data source ("hiveDruidDataSource") to a
 * {@link JdbcTemplate}; concrete Hive DAOs extend this class.
 *
 * @author sixmonth
 * @Date May 18, 2019
 *
 */
@Repository
public class HiveJdbcBaseDaoImpl {

	private JdbcTemplate jdbcTemplate;

	/**
	 * Setter injection keyed by qualifier so the Hive pool is chosen even
	 * when several DataSource beans exist.
	 */
	@Autowired
	public void setJdbcTemplate(@Qualifier("hiveDruidDataSource") DataSource hiveDataSource) {
		this.jdbcTemplate = new JdbcTemplate(hiveDataSource);
	}

	/** @return the template wrapping the Hive data source */
	public JdbcTemplate getJdbcTemplate() {
		return this.jdbcTemplate;
	}
}
package com.springboot.sixmonth.dao.jdbcDao.test;
import org.springframework.stereotype.Repository;
import com.springboot.sixmonth.common.config.jdbcConfig.HiveJdbcBaseDaoImpl;
/**
 * Smoke-test DAO for the Hive connection.
 *
 * @author sixmonth
 * @Date May 18, 2019
 *
 */
@Repository
public class TestHiveDao extends HiveJdbcBaseDaoImpl {

	/**
	 * Runs a single-row query against Hive to verify the connection.
	 *
	 * @return the first {@code name} value from table {@code sixmonth}
	 */
	public String test() {
		final String query = "SELECT name from sixmonth limit 1";
		return this.getJdbcTemplate().queryForObject(query, String.class);
	}
}
@Autowired
private TestHiveDao testHiveDao;