kafka使用avro序列化和反序列化

使用 avro 生成 entity 文件的方法可以查看这篇文章：https://blog.csdn.net/u012062455/article/details/84889694

生产者代码

    /**
     * Produces 1000 Avro-encoded {@code Customer} records to the "Customer" topic.
     *
     * <p>Each record is serialized with Avro's {@link SpecificDatumWriter} into a
     * byte array and sent with a String key ("customer-&lt;i&gt;") and byte[] value,
     * matching the String/ByteArray serializers configured below.
     */
    public static void CustomerTest() {
        Properties kafkaProps = new Properties();
        kafkaProps.put("bootstrap.servers","192.168.0.31:9092,192.168.0.32:9092,192.168.0.33:9092");
        kafkaProps.put("key.serializer","org.apache.kafka.common.serialization.StringSerializer");
        kafkaProps.put("value.serializer","org.apache.kafka.common.serialization.ByteArraySerializer");

        // Typed producer (the original used raw types). The writer is created once:
        // the Customer schema is identical for every record, so there is no need to
        // rebuild it 1000 times inside the loop.
        KafkaProducer<String, byte[]> producer = new KafkaProducer<>(kafkaProps);
        SpecificDatumWriter<Customer> writer = new SpecificDatumWriter<>(Customer.getClassSchema());
        try {
            for (int i = 0; i < 1000; i++) {
                Customer customer = new Customer();
                customer.setEmail("[email protected]" + i);
                customer.setName("ric-" + i);
                customer.setId(i);
                customer.setImages(null);

                ByteArrayOutputStream out = new ByteArrayOutputStream();
                BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
                try {
                    writer.write(customer, encoder);
                    encoder.flush();
                } catch (IOException e) {
                    // The original swallowed this exception and still sent whatever
                    // partial bytes were in the buffer; skip the broken record instead.
                    e.printStackTrace();
                    continue;
                }
                producer.send(new ProducerRecord<>("Customer", "customer-" + i, out.toByteArray()));
            }
        } finally {
            // Flush buffered records and release network resources even if the
            // loop throws (the original leaked the producer on failure).
            producer.close();
        }
    }

消费者代码

    /**
     * Consumes Avro-encoded {@code Customer} records from the "Customer" topic and
     * prints key, id, name and email for each one.
     *
     * <p>Values arrive as raw byte arrays (ByteArrayDeserializer) and are decoded
     * with Avro's {@link SpecificDatumReader} against the generated Customer schema.
     * Loops forever; the consumer is closed in a finally block on any exit.
     */
    public static void CustomerTest() {
        Properties kafkaProps = new Properties();
        kafkaProps.put("bootstrap.servers","192.168.0.31:9092,192.168.0.32:9092,192.168.0.33:9092");

        kafkaProps.put("key.deserializer","org.apache.kafka.common.serialization.StringDeserializer");
        kafkaProps.put("value.deserializer","org.apache.kafka.common.serialization.ByteArrayDeserializer");

        kafkaProps.put("group.id","DemoAvroKafkaConsumer2");

        // Start from the beginning of the topic when this group has no committed offset.
        kafkaProps.put("auto.offset.reset","earliest");

        // Typed consumer (the original used raw types throughout).
        KafkaConsumer<String, byte[]> consumer = new KafkaConsumer<>(kafkaProps);

        consumer.subscribe(Collections.singletonList("Customer"));

        // One reader for all records — the schema never changes.
        SpecificDatumReader<Customer> reader = new SpecificDatumReader<>(Customer.getClassSchema());
        try {
            while (true) {
                // NOTE(review): 10 ms is a very short poll timeout — the loop will
                // spin busily when the topic is idle; consider a larger value.
                ConsumerRecords<String, byte[]> records = consumer.poll(10);
                for (ConsumerRecord<String, byte[]> record : records) {
                    Decoder decoder = DecoderFactory.get().binaryDecoder(record.value(), null);
                    try {
                        Customer customer = reader.read(null, decoder);
                        System.out.println(record.key() + ":" + customer.get("id") + "\t" + customer.get("name") + "\t" + customer.get("email"));
                    } catch (IOException e) {
                        // A single undecodable record should not kill the loop;
                        // log it and move on to the next record.
                        e.printStackTrace();
                    }
                }
            }
        } finally {
            consumer.close();
        }
    }

相关pom依赖


    <dependency>
      <groupId>org.apache.kafka</groupId>
      <artifactId>kafka_2.11</artifactId>
      <version>1.0.0</version>
    </dependency>
    <dependency>
      <groupId>org.apache.avro</groupId>
      <artifactId>avro</artifactId>
      <version>1.8.2</version>
    </dependency>
    <dependency>
      <groupId>org.apache.avro</groupId>
      <artifactId>avro-tools</artifactId>
      <version>1.8.2</version>
    </dependency>
    <dependency>
      <groupId>com.twitter</groupId>
      <artifactId>bijection-avro_2.11</artifactId>
      <version>0.9.6</version>
    </dependency>

你可能感兴趣的:(大数据,消息中间件,大数据)