Flink reading a Kerberos-secured HBase table

The job below uses a custom RichSourceFunction: open() performs the Kerberos keytab login through UserGroupInformation and builds the HBase connection, run() scans the table and emits each row as a Tuple2 of (rowkey, comma-joined cell values).

package com.hx.test;

import org.apache.flink.api.common.restartstrategy.RestartStrategies;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.security.UserGroupInformation;

import java.io.IOException;
import java.util.Iterator;


public class FlinkReadHbase {

    public static void main(String[] args) throws Exception{

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.getConfig().setRestartStrategy(RestartStrategies.fixedDelayRestart(3,1000));

        System.setProperty("java.security.auth.login.config","D:\\workspace\\flink2doris\\src\\main\\resources\\kafka_client_jaas.conf");
        System.setProperty("java.security.krb5.conf","D:\\workspace\\flink2doris\\src\\main\\resources\\krb5.ini");
        System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");

        DataStream<Tuple2<String, String>> result = env.addSource(new HBaseReader());

        result.print();
        env.execute();
    }

    public static class HBaseReader extends RichSourceFunction<Tuple2<String, String>> {

        private Connection conn = null;
        private Table table = null;
        private Scan scan = null;

        @Override
        public void open(Configuration parameters) throws Exception {
            super.open(parameters);
            org.apache.hadoop.conf.Configuration conf = HBaseConfiguration.create();
            conf.set("hbase.zookeeper.quorum", "cdp-kfk01.hx.tc,cdp-kfk02.hx.tc,cdp-kfk03.hx.tc");
            conf.set(HConstants.ZOOKEEPER_CLIENT_PORT, "2181");
            conf.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/hbase");

            // Parameters required for Kerberos authentication:
            // HBase authentication is kerberos, RPC protection is privacy
            conf.set("hadoop.security.authentication", "kerberos");
            conf.set("hbase.security.authentication", "kerberos");
            conf.set("hbase.rpc.protection", "privacy");

            // Kerberos principals and keytab
            conf.set("keytab.file", "D:/soft/kerbros/hbase.keytab");
            conf.set("hbase.master.kerberos.principal", "hbase/[email protected]");
            conf.set("hbase.regionserver.kerberos.principal", "hbase/[email protected]");
            conf.set("kerberos.principal", "hbase/[email protected]");

            // Hand the configuration to UGI, then log in from the keytab
            UserGroupInformation.setConfiguration(conf);
            System.out.println("--------------> starting keytab authentication <--------------");
            UserGroupInformation.loginUserFromKeytab("hbase/[email protected]", "D:/soft/kerbros/hbase.keytab");
            System.out.println("--------------> keytab authentication done <--------------");


            conn = ConnectionFactory.createConnection(conf);
            table = conn.getTable(TableName.valueOf("ods_sr_prd:tbl_store"));
            scan = new Scan();

            scan.addFamily(Bytes.toBytes("cf"));
            scan.withStartRow(Bytes.toBytes("1"));
            scan.withStopRow(Bytes.toBytes("1"),true);


        }

        @Override
        public void run(SourceContext<Tuple2<String, String>> ctx) throws Exception {
            ResultScanner rs = table.getScanner(scan);
            Iterator<Result> iterator = rs.iterator();
            while (iterator.hasNext()) {
                Result result = iterator.next();
                String rowkey = Bytes.toString(result.getRow());
                StringBuilder sb = new StringBuilder();
                for (Cell cell : result.listCells()) {
                    String value = Bytes.toString(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
                    sb.append(value).append(",");
                }
                // drop the trailing comma (guarding against rows with no cells)
                String valueString = sb.length() > 0 ? sb.substring(0, sb.length() - 1) : "";
                ctx.collect(Tuple2.of(rowkey, valueString));
            }
            rs.close();
        }

        @Override
        public void cancel() {
            try {
                if (table != null) {
                    table.close();
                }
                if (conn != null) {
                    conn.close();
                }
            } catch (IOException e) {
                System.out.println("Close HBase Exception666:"+ e.toString());
            }

        }
    }


}
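
The main() method above points java.security.auth.login.config at a kafka_client_jaas.conf: the ZooKeeper client inside the HBase client reads the Client login section of that file for SASL authentication. As a minimal sketch (not taken from the original post), assuming the same keytab and principal used above, that file might look like:

Client {
  com.sun.security.auth.module.Krb5LoginModule required
  useKeyTab=true
  storeKey=true
  keyTab="D:/soft/kerbros/hbase.keytab"
  principal="hbase/[email protected]";
};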


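Relying on the static login user set by loginUserFromKeytab can be fragile when several jobs share one JVM. A hedged alternative sketch (not from the original post) keeps a handle on the UGI and binds it to the HBase connection explicitly via User.create; the quorum, principal, and keytab below are the same assumed values used above:

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.security.UserGroupInformation;

public class SecureHBaseConnectionSketch {
    public static Connection connect() throws Exception {
        org.apache.hadoop.conf.Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "cdp-kfk01.hx.tc,cdp-kfk02.hx.tc,cdp-kfk03.hx.tc");
        conf.set("hadoop.security.authentication", "kerberos");
        conf.set("hbase.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);
        // Keep an explicit handle on the logged-in UGI instead of mutating the static login user
        UserGroupInformation ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
                "hbase/[email protected]", "D:/soft/kerbros/hbase.keytab");
        // Bind the connection to that authenticated user explicitly
        return ConnectionFactory.createConnection(conf, User.create(ugi));
    }
}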
-------------------------------------------------pom file---------------------------------------------

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>org.com.hx</groupId>
    <artifactId>flink2doris</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <flink.version>1.13.2</flink.version>
        <java.version>1.8</java.version>
        <scala.binary.version>2.11</scala.binary.version>
        <maven.compiler.source>${java.version}</maven.compiler.source>
        <maven.compiler.target>${java.version}</maven.compiler.target>
        <log4j.version>1.2.17</log4j.version>

        <hbase.version>2.1.0-cdh6.3.2</hbase.version>
        <hadoop.version>3.0.0-cdh6.3.2</hadoop.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-java</artifactId>
            <version>${flink.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-streaming-java_2.11</artifactId>
            <version>${flink.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-clients_2.11</artifactId>
            <version>${flink.version}</version>
        </dependency>

        <!-- Table API planners -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner_2.11</artifactId>
            <version>${flink.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner-blink_2.11</artifactId>
            <version>${flink.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-api-java-bridge_2.11</artifactId>
            <version>${flink.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-common</artifactId>
            <version>${flink.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table</artifactId>
            <version>${flink.version}</version>
            <type>pom</type>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-json</artifactId>
            <version>${flink.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-csv</artifactId>
            <version>${flink.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-kafka_2.11</artifactId>
            <version>1.13.2</version>
        </dependency>

        <dependency>
            <groupId>org.apache.bahir</groupId>
            <artifactId>flink-connector-redis_2.11</artifactId>
            <version>1.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-elasticsearch6_2.11</artifactId>
            <version>1.13.2</version>
        </dependency>

        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>5.1.47</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>${hadoop.version}</version>
            <exclusions>
                <exclusion>
                    <groupId>io.netty</groupId>
                    <artifactId>netty</artifactId>
                </exclusion>
            </exclusions>
        </dependency>

        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.62</version>
        </dependency>

        <!-- Flink HBase connector, with the CDH Hadoop/HBase jars excluded -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-hbase-base_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
            <exclusions>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-hdfs</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-common</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-auth</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hbase</groupId>
                    <artifactId>hbase-protocol</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hbase</groupId>
                    <artifactId>hbase-prefix-tree</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hbase</groupId>
                    <artifactId>hbase-server</artifactId>
                </exclusion>
            </exclusions>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-hbase_2.11</artifactId>
            <version>1.10.3</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-client</artifactId>
            <version>${hbase.version}</version>
            <exclusions>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-common</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-auth</artifactId>
                </exclusion>
            </exclusions>
        </dependency>

        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <version>RELEASE</version>
            <scope>compile</scope>
        </dependency>
    </dependencies>

    <repositories>
        <repository>
            <id>cloudera</id>
            <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
            <releases>
                <enabled>true</enabled>
            </releases>
            <snapshots>
                <enabled>false</enabled>
            </snapshots>
        </repository>
    </repositories>

    <build>
        <sourceDirectory>src/main/java</sourceDirectory>
        <plugins>
            <plugin>
                <groupId>net.alchim31.maven</groupId>
                <artifactId>scala-maven-plugin</artifactId>
                <version>3.2.2</version>
                <executions>
                    <execution>
                        <goals>
                            <goal>compile</goal>
                            <goal>testCompile</goal>
                        </goals>
                        <configuration>
                            <args>
                                <arg>-dependencyfile</arg>
                                <arg>${project.build.directory}/.scala_dependencies</arg>
                            </args>
                        </configuration>
                    </execution>
                </executions>
            </plugin>

            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-shade-plugin</artifactId>
                <version>3.1.0</version>
                <executions>
                    <execution>
                        <phase>package</phase>
                        <goals>
                            <goal>shade</goal>
                        </goals>
                        <configuration>
                            <transformers>
                                <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
                            </transformers>
                            <relocations>
                                <relocation>
                                    <pattern>org.apache.http</pattern>
                                    <shadedPattern>org.apache.myhttp</shadedPattern>
                                </relocation>
                            </relocations>
                            <filters>
                                <filter>
                                    <artifact>*:*</artifact>
                                    <excludes>
                                        <exclude>META-INF/maven/**</exclude>
                                        <exclude>META-INF/*.SF</exclude>
                                        <exclude>META-INF/*.DSA</exclude>
                                        <exclude>META-INF/*.RSA</exclude>
                                    </excludes>
                                </filter>
                            </filters>
                        </configuration>
                    </execution>
                </executions>
            </plugin>

            <plugin>
                <groupId>org.codehaus.mojo</groupId>
                <artifactId>exec-maven-plugin</artifactId>
                <version>1.2.1</version>
                <executions>
                    <execution>
                        <goals>
                            <goal>exec</goal>
                        </goals>
                    </execution>
                </executions>
                <configuration>
                    <executable>java</executable>
                    <includeProjectDependencies>true</includeProjectDependencies>
                    <includePluginDependencies>false</includePluginDependencies>
                    <classpathScope>compile</classpathScope>
                </configuration>
            </plugin>

            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
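
With the shade plugin above producing a fat jar, a typical build-and-submit flow looks like the following; the jar name follows this pom's artifactId and version, and the entry class is the one defined at the top (verify both against your build output):

mvn clean package
flink run -c com.hx.test.FlinkReadHbase target/flink2doris-1.0-SNAPSHOT.jar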