Java调试Kafka代码示例

一、Consumer代码示例
1、Jaas示例
 /**
  * Minimal Kafka consumer example authenticating with SASL/PLAIN via an inline JAAS config.
  * Connects to the broker, subscribes to one topic, and prints every record forever.
  */
 public static void main(String[] args) {
		Properties props = new Properties();
		props.put("bootstrap.servers", "x.x.x.x:9092");
		props.put("group.id", "group-1");
		props.put("enable.auto.commit", "true");
		props.put("auto.commit.interval.ms", "1000");
		// Default starting position when the group has no committed offset (earliest, latest).
		props.put("auto.offset.reset", "latest");
		props.put("session.timeout.ms", "30000");
		props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
		props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

		// SASL/PLAIN authentication: credentials are supplied inline instead of a jaas.conf file.
		props.put("security.protocol", "SASL_PLAINTEXT");
		props.put("sasl.mechanism", "PLAIN");
		props.put("sasl.jaas.config",  "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"xxx\" password=\"xxx\";");

		// Parameterized type matches the String deserializers above (was a raw type).
		KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(props);
		kafkaConsumer.subscribe(Arrays.asList("lanxin-desensitization-log"));
		// Poll loop runs forever; each poll waits up to 1s for new records.
		while (true) {
			ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofSeconds(1));
			for (ConsumerRecord<String, String> record : records) {
				System.out.println("Partition: " + record.partition() + " Offset: " + record.offset() + " Value: " + record.value() + " ThreadID: " + Thread.currentThread().getId());
			}
		}
 }
2、Kerberos示例
	/**
	 * Minimal Kafka consumer example authenticating with SASL/GSSAPI (Kerberos).
	 * Requires an external jaas.conf and krb5.conf, pointed to via system properties
	 * before the client is created.
	 */
	public static void main(String[] args) {
		// Kerberos login configuration must be set before the consumer is constructed.
		System.setProperty("java.security.auth.login.config", "/etc/jaas.conf");
		System.setProperty("java.security.krb5.conf", "/etc/krb5.conf");
		Properties props = new Properties();
		props.put("bootstrap.servers", "x.x.x.x:9092");
		props.put("group.id", "group-1");
		props.put("enable.auto.commit", "false");
		props.put("auto.commit.interval.ms", "1000");
		props.put("auto.offset.reset", "earliest");
		props.put("session.timeout.ms", "30000");
		props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
		props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

		// SASL/GSSAPI: the broker's Kerberos principal primary is "kafka".
		props.put("security.protocol", "SASL_PLAINTEXT");
		props.put("sasl.mechanism", "GSSAPI");
		props.put("sasl.kerberos.service.name", "kafka");

		// Parameterized type matches the String deserializers above (was a raw type).
		KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(props);
		kafkaConsumer.subscribe(Arrays.asList("topic-test"));
		// Poll loop runs forever; each poll waits up to 1s for new records.
		while (true) {
			ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofSeconds(1));
			for (ConsumerRecord<String, String> record : records) {
				System.out.println("Partition: " + record.partition() + " Offset: " + record.offset() + " Value: " + record.value() + " ThreadID: " + Thread.currentThread().getId());
			}
		}
	}
二、Producer代码示例
Producer<String, String> producer = new KafkaProducer<String, String>(props);
String msg= "test";
ProducerRecord<String, String> producerRecord = new ProducerRecord("topic-producer", msg);

三、解决 Offset Explorer 可以连接、但 Java 客户端 Krb5 认证不通的报错问题
1、报错信息如下:
Caused by:KrbException:Cannot locate KDC
		at sun.security.krb5.Config.getKDCList(Config.java:1061)
		at sun.security.krb5.KdcComm.send(KdcComm.java:218)
		at sun.security.krb5.KdcComm.send(KdcComm.java:200)
		at sun.security.krb5.KrbAsReqBuilder.send(KrbAsReqBuilder.java:316)
		at sun.security.krb5.KrbAsReqBuilder.action(KrbAsReqBuilder.java:361)
		at sun.security.auth.module.Krb5LoginModule.attemptAuthentication(Krb5LoginModule.java:776)
		... 24 more
Caused by:KrbException:Generic error (description in e-text) (60) - Unable to locate KDC for realm ZZYC.COM
		at sun.security.krb5.Config.getKDCFromDNS(Config.java:1158)
		at sun.security.krb5.Config.getKDCList(Config.java:1034)
		... 29 more
2、绕过Krb5方法
/**
 * Workaround for the "Cannot locate KDC" error: skip Kerberos entirely by talking to the
 * broker over plain (unauthenticated) PLAINTEXT instead of SASL_PLAINTEXT/GSSAPI.
 * Only works if the broker also exposes a PLAINTEXT listener.
 */
public static void main(String[] args) {
	System.setProperty("java.security.auth.login.config", "D:\\jaas.conf");
	// krb5.conf is intentionally NOT set — Kerberos is bypassed below.
	//System.setProperty("java.security.krb5.conf", "D:\\krb5.conf");
	Properties props = new Properties();
	props.put("bootstrap.servers", "x.x.x.x:9092");
	props.put("group.id", "group-1");
	props.put("enable.auto.commit", "false");
	props.put("auto.commit.interval.ms", "1000");
	props.put("auto.offset.reset", "earliest");
	props.put("session.timeout.ms", "30000");
	props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
	props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

	// Changed SASL_PLAINTEXT to PLAINTEXT to bypass Kerberos.
	props.put("security.protocol", "PLAINTEXT");
	// Changed GSSAPI to PLAIN. NOTE: sasl.* settings are ignored under PLAINTEXT,
	// so these two lines have no effect and are kept only to mirror the original setup.
	props.put("sasl.mechanism", "PLAIN");
	props.put("sasl.kerberos.service.name", "kafka");

	// Parameterized type matches the String deserializers above (was a raw type).
	KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(props);
	kafkaConsumer.subscribe(Arrays.asList("topic-log"));
	// Poll loop runs forever; each poll waits up to 1s for new records.
	while (true) {
		ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofSeconds(1));
		for (ConsumerRecord<String, String> record : records) {
			System.out.println("Partition: " + record.partition() + " Offset: " + record.offset() + " Value: " + record.value() + " ThreadID: " + Thread.currentThread().getId());
		}
	}
}

你可能感兴趣的:(个人心得,kafka,java,大数据)