Flink reading from Kafka and writing into HBase (code and pom)

The target table must already exist in HBase (e.g. create 'stu', 'info' in the HBase shell); the job does not create it.
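If you'd rather create it from code, here is a minimal sketch using the HBase 1.x Admin API, with the same quorum, table, and column family as the job below:

import org.apache.hadoop.hbase.{HBaseConfiguration, HColumnDescriptor, HTableDescriptor, TableName}
import org.apache.hadoop.hbase.client.{Admin, Connection, ConnectionFactory}

object CreateStuTable {
  def main(args: Array[String]): Unit = {
    val conf = HBaseConfiguration.create()
    conf.set("hbase.zookeeper.quorum", "node2:2181,node3:2181,node4:2181")
    val conn: Connection = ConnectionFactory.createConnection(conf)
    val admin: Admin = conn.getAdmin
    val name = TableName.valueOf("stu")
    // Create the table with its single "info" column family, if missing
    if (!admin.tableExists(name)) {
      val desc = new HTableDescriptor(name)
      desc.addFamily(new HColumnDescriptor("info"))
      admin.createTable(desc)
    }
    admin.close()
    conn.close()
  }
}

The streaming job itself: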

import java.util.Properties

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer08
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}
import org.apache.hadoop.hbase.client.{Connection, ConnectionFactory, Put, Table}
import org.apache.hadoop.hbase.util.Bytes

object Test02 {

  /** Writes one "rowkey qualifier value" record into the stu table.
    * Note: this opens and closes a full HBase connection per record,
    * which is simple but slow; see the RichSinkFunction sketch below. */
  def writeHbase(x: String): Unit = {
    val conf: Configuration = HBaseConfiguration.create()
    conf.set("hbase.zookeeper.quorum","node2:2181,node3:2181,node4:2181")
    val conn: Connection = ConnectionFactory.createConnection(conf)
    val table: Table = conn.getTable(TableName.valueOf("stu"))
    val arr: Array[String] = x.split(" ")
    if (arr.length != 3){
      println(x + " ---- insert skipped: expected 3 space-separated fields")
    }else{
      // arr(0) is the rowkey, arr(1) the column qualifier, arr(2) the value,
      // all under the "info" column family
      val put = new Put(Bytes.toBytes(arr(0)))
      put.addColumn(Bytes.toBytes("info"), Bytes.toBytes(arr(1)), Bytes.toBytes(arr(2)))
      table.put(put)
      println(x + " --- inserted")
    }
    table.close()
    conn.close()
  }



  def main(args: Array[String]): Unit = {
    import org.apache.flink.api.scala._ // implicit TypeInformation for the String stream
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)
    val properties = new Properties()
    properties.setProperty("bootstrap.servers","node2:9092")
    properties.setProperty("zookeeper.connect","node2:2181")
    properties.setProperty("enable.auto.commit","true")
    properties.setProperty("group.id","test")
    val test: DataStream[String] = env.addSource(
      new FlinkKafkaConsumer08[String]("test", new SimpleStringSchema(), properties))
    // Write each record to HBase as a side effect, then echo it to stdout
    test.map(x => {
      writeHbase(x)
      x
    }).print()

    env.execute("kafkahbase")
  }
}
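writeHbase opens and tears down a full HBase connection for every single record. The idiomatic Flink alternative is a RichSinkFunction that opens the connection once per parallel task in open() and releases it in close(). A minimal sketch (the HBaseSink class name is my own, not from the original):

import org.apache.flink.configuration.{Configuration => FlinkConfig}
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction
import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}
import org.apache.hadoop.hbase.client.{Connection, ConnectionFactory, Put, Table}
import org.apache.hadoop.hbase.util.Bytes

class HBaseSink extends RichSinkFunction[String] {
  private var conn: Connection = _
  private var table: Table = _

  // Runs once per parallel task: build the shared connection
  override def open(parameters: FlinkConfig): Unit = {
    val conf = HBaseConfiguration.create()
    conf.set("hbase.zookeeper.quorum", "node2:2181,node3:2181,node4:2181")
    conn = ConnectionFactory.createConnection(conf)
    table = conn.getTable(TableName.valueOf("stu"))
  }

  // Runs per record: same "rowkey qualifier value" format as writeHbase
  override def invoke(value: String): Unit = {
    val arr = value.split(" ")
    if (arr.length == 3) {
      val put = new Put(Bytes.toBytes(arr(0)))
      put.addColumn(Bytes.toBytes("info"), Bytes.toBytes(arr(1)), Bytes.toBytes(arr(2)))
      table.put(put)
    }
  }

  // Runs on shutdown: release HBase resources
  override def close(): Unit = {
    if (table != null) table.close()
    if (conn != null) conn.close()
  }
}

With this in place, the map in main collapses to test.addSink(new HBaseSink), and the connection is reused across records instead of rebuilt per message.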



pom dependencies

 <dependency>
                <groupId>junit</groupId>
                <artifactId>junit</artifactId>
                <version>4.12</version>
            </dependency>
            <dependency>
                <groupId>org.apache.hbase</groupId>
                <artifactId>hbase-client</artifactId>
                <version>1.2.1</version>
            </dependency>
            <dependency>
                <groupId>org.apache.hbase</groupId>
                <artifactId>hbase-common</artifactId>
                <version>1.2.1</version>
            </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.11</artifactId>
            <version>2.0.2</version>
        </dependency>
