Integrating Hive/Phoenix Data Sources with Spring Boot

  1. Environment preparation
    Hadoop : 2.7.3
    Hive : 1.2.1
    HBase : 1.2

  2. Add dependencies
    pom.xml:

    
    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <spring-data-hadoop.version>2.4.0.RELEASE</spring-data-hadoop.version>
        <hive.version>1.2.1</hive.version>
        <phoenix-client.version>4.7</phoenix-client.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-jdbc</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.data</groupId>
            <artifactId>spring-data-hadoop</artifactId>
            <version>${spring-data-hadoop.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>${hive.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.phoenix</groupId>
            <artifactId>phoenix-client</artifactId>
            <version>${phoenix-client.version}</version>
        </dependency>
    </dependencies>
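
    Note: the data source configuration classes in sections 4 and 5 below use Alibaba Druid's DruidDataSource as the pooled DataSource implementation, so the com.alibaba:druid artifact (not shown in the snippet above) also has to be on the classpath; any other DataSource implementation could be substituted instead.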
    
  3. Spring Boot configuration file
    In application.yml:

    # Custom Hive data source settings
    hive:
      url: jdbc:hive2://192.168.0.100:10000/default
      driver-class-name: org.apache.hive.jdbc.HiveDriver
      username: hive
      password: hive
     
    # Custom Phoenix data source settings
    phoenix:
      enable: true
      url: jdbc:phoenix:192.168.0.100
      driver-class-name: org.apache.phoenix.jdbc.PhoenixDriver
      username: 
      password: 
      default-auto-commit: true
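
    As an alternative to reading each key through Environment (as the configuration classes below do), the hive block above could also be bound to a typed properties class with @ConfigurationProperties. This is only a sketch: the HiveProperties class name is illustrative, and Spring Boot's relaxed binding maps driver-class-name onto driverClassName.

    import org.springframework.boot.context.properties.ConfigurationProperties;
    import org.springframework.stereotype.Component;

    /** Hypothetical typed view of the hive.* block in application.yml. */
    @Component
    @ConfigurationProperties(prefix = "hive")
    public class HiveProperties {

        private String url;
        private String driverClassName;
        private String username;
        private String password;

        public String getUrl() { return url; }
        public void setUrl(String url) { this.url = url; }
        public String getDriverClassName() { return driverClassName; }
        public void setDriverClassName(String driverClassName) { this.driverClassName = driverClassName; }
        public String getUsername() { return username; }
        public void setUsername(String username) { this.username = username; }
        public String getPassword() { return password; }
        public void setPassword(String password) { this.password = password; }
    }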
    
  4. Hive configuration

    import javax.sql.DataSource;

    import com.alibaba.druid.pool.DruidDataSource;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.beans.factory.annotation.Qualifier;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.core.env.Environment;
    import org.springframework.jdbc.core.JdbcTemplate;

    /**
     * Hive data source configuration.
     */
    @Configuration
    public class HiveDataSource {

        @Autowired
        private Environment env;

        /** Builds a Druid connection pool from the hive.* properties in application.yml. */
        @Bean(name = "hiveJdbcDataSource")
        @Qualifier("hiveJdbcDataSource")
        public DataSource dataSource() {
            DruidDataSource dataSource = new DruidDataSource();
            dataSource.setUrl(env.getProperty("hive.url"));
            dataSource.setDriverClassName(env.getProperty("hive.driver-class-name"));
            dataSource.setUsername(env.getProperty("hive.username"));
            dataSource.setPassword(env.getProperty("hive.password"));
            return dataSource;
        }

        /** JdbcTemplate bound to the Hive data source. */
        @Bean(name = "hiveJdbcTemplate")
        public JdbcTemplate hiveJdbcTemplate(@Qualifier("hiveJdbcDataSource") DataSource dataSource) {
            return new JdbcTemplate(dataSource);
        }
    }
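
    A minimal usage sketch for the hiveJdbcTemplate bean. The HiveQueryService class is hypothetical; the tb_shop table name is taken from the test in section 6, and queryForList(...) returns each row as a column-name to value map, so no column names have to be assumed.

    import java.util.List;
    import java.util.Map;

    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.beans.factory.annotation.Qualifier;
    import org.springframework.jdbc.core.JdbcTemplate;
    import org.springframework.stereotype.Service;

    /** Hypothetical service that reads from Hive through the hiveJdbcTemplate bean. */
    @Service
    public class HiveQueryService {

        @Autowired
        @Qualifier("hiveJdbcTemplate")
        private JdbcTemplate hiveJdbcTemplate;

        /** Returns every row of tb_shop as a column-name -> value map. */
        public List<Map<String, Object>> listShops() {
            return hiveJdbcTemplate.queryForList("SELECT * FROM tb_shop");
        }
    }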
    
  5. Phoenix configuration

    import javax.sql.DataSource;

    import com.alibaba.druid.pool.DruidDataSource;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.beans.factory.annotation.Qualifier;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.core.env.Environment;
    import org.springframework.jdbc.core.JdbcTemplate;

    /**
     * Phoenix data source configuration.
     */
    @Configuration
    public class PhoenixDataSource {

        @Autowired
        private Environment env;

        /** Builds a Druid connection pool from the phoenix.* properties in application.yml. */
        @Bean(name = "phoenixJdbcDataSource")
        @Qualifier("phoenixJdbcDataSource")
        public DataSource dataSource() {
            DruidDataSource dataSource = new DruidDataSource();
            dataSource.setUrl(env.getProperty("phoenix.url"));
            dataSource.setDriverClassName(env.getProperty("phoenix.driver-class-name"));
            dataSource.setUsername(env.getProperty("phoenix.username"));   // The Phoenix username is empty by default
            dataSource.setPassword(env.getProperty("phoenix.password"));   // The Phoenix password is empty by default
            dataSource.setDefaultAutoCommit(Boolean.valueOf(env.getProperty("phoenix.default-auto-commit")));
            return dataSource;
        }

        /** JdbcTemplate bound to the Phoenix data source. */
        @Bean(name = "phoenixJdbcTemplate")
        public JdbcTemplate phoenixJdbcTemplate(@Qualifier("phoenixJdbcDataSource") DataSource dataSource) {
            return new JdbcTemplate(dataSource);
        }
    }
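
    A usage sketch for the phoenixJdbcTemplate bean. Phoenix uses UPSERT rather than INSERT; the tb_user table name comes from the test in section 6, while the PhoenixUserService class and the ID/NAME columns are assumptions for illustration only.

    import java.util.List;
    import java.util.Map;

    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.beans.factory.annotation.Qualifier;
    import org.springframework.jdbc.core.JdbcTemplate;
    import org.springframework.stereotype.Service;

    /** Hypothetical service that reads and writes HBase data through Phoenix. */
    @Service
    public class PhoenixUserService {

        @Autowired
        @Qualifier("phoenixJdbcTemplate")
        private JdbcTemplate phoenixJdbcTemplate;

        /** Writes one row; the ID and NAME columns are assumed to exist on tb_user. */
        public void saveUser(long id, String name) {
            phoenixJdbcTemplate.update("UPSERT INTO tb_user (ID, NAME) VALUES (?, ?)", id, name);
        }

        /** Returns every row of tb_user as a column-name -> value map. */
        public List<Map<String, Object>> listUsers() {
            return phoenixJdbcTemplate.queryForList("SELECT * FROM tb_user");
        }
    }

    Because default-auto-commit is set to true in application.yml above, each UPSERT is committed immediately without an explicit commit call.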
    
  6. Tests
    Hive test:

    import org.junit.Test;
    import org.junit.runner.RunWith;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.beans.factory.annotation.Qualifier;
    import org.springframework.boot.test.SpringApplicationConfiguration;
    import org.springframework.jdbc.core.JdbcTemplate;
    import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

    @RunWith(SpringJUnit4ClassRunner.class)
    @SpringApplicationConfiguration(HiveServiceApplication.class)
    public class MainTest {

        @Autowired
        @Qualifier("hiveJdbcTemplate")
        JdbcTemplate hiveJdbcTemplate;

        @Test
        public void dataSourceTest() {
            // execute() only verifies that the statement runs; use queryForList(...) to read rows back
            hiveJdbcTemplate.execute("SELECT * FROM tb_shop");
        }
    }
    

    Phoenix test:

    import org.junit.Test;
    import org.junit.runner.RunWith;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.beans.factory.annotation.Qualifier;
    import org.springframework.boot.test.SpringApplicationConfiguration;
    import org.springframework.jdbc.core.JdbcTemplate;
    import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

    @RunWith(SpringJUnit4ClassRunner.class)
    @SpringApplicationConfiguration(HBaseServiceApplication.class)
    public class MainTest {

        @Autowired
        @Qualifier("phoenixJdbcTemplate")
        JdbcTemplate phoenixJdbcTemplate;

        @Test
        public void dataSourceTest() {
            phoenixJdbcTemplate.execute("SELECT * FROM tb_user");
        }
    }
    
