Converting Apache Avro to Confluent Avro

In the Confluent wire format, a record value is the Avro-encoded payload prefixed with a magic byte (0x0) and the 4-byte schema ID assigned by Schema Registry. The snippet below takes an Apache Avro GenericRecord, serializes it into that format with KafkaAvroSerializer, and publishes the resulting bytes to Kafka.

import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import java.util.Properties

import org.apache.avro.Schema
import org.apache.avro.generic.{GenericData, GenericRecord}
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

import io.confluent.kafka.serializers.KafkaAvroSerializer

val schemaString = """{"type":"record","name":"Test","fields":[{"name":"foo","type":"string"}]}"""
val schema = new Schema.Parser().parse(schemaString)

// Producer configuration. Keys are plain strings; values are the pre-serialized
// Confluent Avro bytes, so a pass-through ByteArraySerializer is used for values.
val properties = new Properties()
properties.setProperty("bootstrap.servers", "localhost:9092")
properties.setProperty("schema.registry.url", "http://localhost:8081")
properties.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
properties.setProperty("value.serializer", "org.apache.kafka.common.serialization.ByteArraySerializer")

val producer = new KafkaProducer[String, Array[Byte]](properties)

val record = new GenericData.Record(schema)
record.put("foo", "bar")

// Configure a standalone KafkaAvroSerializer for record values.
// configure() expects a java.util.Map[String, _], so pass the registry URL explicitly.
val avroSerializer = new KafkaAvroSerializer()
val serializerConfig = new java.util.HashMap[String, String]()
serializerConfig.put("schema.registry.url", "http://localhost:8081")
avroSerializer.configure(serializerConfig, false) // isKey = false: this serializer is for values

// serialize() registers the schema with Schema Registry (if it is not already registered)
// and returns the Confluent wire format: magic byte 0x0, a 4-byte schema ID, then the
// Avro-encoded payload.
val serializedData = avroSerializer.serialize("test-topic", record)

// The schema ID is already embedded in the payload, so read it back from bytes 1-4
// rather than hard-coding it; the header below is optional and purely informational.
val avroSchemaId = ByteBuffer.wrap(serializedData, 1, 4).getInt

val kafkaRecord = new ProducerRecord[String, Array[Byte]]("test-topic", null, serializedData)
kafkaRecord.headers().add("value.schema.id", avroSchemaId.toString.getBytes(StandardCharsets.UTF_8))

producer.send(kafkaRecord)
producer.flush()
producer.close()
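Alternatively, the manual serialize() step can be skipped by letting the producer's value serializer do the work: configure io.confluent.kafka.serializers.KafkaAvroSerializer as the value serializer and send the GenericRecord directly; it registers the schema and emits the same wire format. A minimal sketch under the same assumptions (local broker and Schema Registry, topic test-topic); the variable names directProps and directProducer are illustrative, and it reuses the record built above.

// Alternative: the KafkaAvroSerializer configured on the producer handles schema
// registration and Confluent wire-format encoding for every record sent.
val directProps = new Properties()
directProps.setProperty("bootstrap.servers", "localhost:9092")
directProps.setProperty("schema.registry.url", "http://localhost:8081")
directProps.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
directProps.setProperty("value.serializer", "io.confluent.kafka.serializers.KafkaAvroSerializer")

val directProducer = new KafkaProducer[String, GenericRecord](directProps)
directProducer.send(new ProducerRecord[String, GenericRecord]("test-topic", record))
directProducer.close()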

 
