使用 DefaultMQProducer 进行数据写入时报错。注意:这不是发生在测试类中,而是在为 Spark Structured Streaming 添加一个名为 RocketMQ 的流式数据源、向其写入数据时报出的错误:
19/09/20 22:29:25 INFO CodeGenerator: Code generated in 132.299007 ms
19/09/20 22:29:25 ERROR Utils: Aborting task
org.apache.rocketmq.client.exception.MQClientException: producerGroup can not equal DEFAULT_PRODUCER, please specify another one.
For more information, please visit the url, http://rocketmq.apache.org/docs/faq/
at org.apache.rocketmq.client.impl.producer.DefaultMQProducerImpl.checkConfig(DefaultMQProducerImpl.java:224)
at org.apache.rocketmq.client.impl.producer.DefaultMQProducerImpl.start(DefaultMQProducerImpl.java:176)
at org.apache.rocketmq.client.impl.producer.DefaultMQProducerImpl.start(DefaultMQProducerImpl.java:168)
at org.apache.rocketmq.client.producer.DefaultMQProducer.start(DefaultMQProducer.java:231)
at org.apache.spark.sql.rocketmq540.CachedRocketMqProducer$.org$apache$spark$sql$rocketmq540$CachedRocketMqProducer$$createRocketMqProducer(CachedRocketMqProducer.scala:59)
at org.apache.spark.sql.rocketmq540.CachedRocketMqProducer$$anon$1.load(CachedRocketMqProducer.scala:36)
at org.apache.spark.sql.rocketmq540.CachedRocketMqProducer$$anon$1.load(CachedRocketMqProducer.scala:33)
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3524)
at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2317)
at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2280)
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2195)
at com.google.common.cache.LocalCache.get(LocalCache.java:3934)
at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3938)
at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4821)
at org.apache.spark.sql.rocketmq540.CachedRocketMqProducer$.getOrCreate(CachedRocketMqProducer.scala:71)
at org.apache.spark.sql.rocketmq540.RocKetMqStreamDataWriter.producer$lzycompute(RocketStreamWriter.scala:60)
at org.apache.spark.sql.rocketmq540.RocKetMqStreamDataWriter.producer(RocketStreamWriter.scala:60)
at org.apache.spark.sql.rocketmq540.RocKetMqStreamDataWriter.write(RocketStreamWriter.scala:65)
at org.apache.spark.sql.rocketmq540.RocKetMqStreamDataWriter.write(RocketStreamWriter.scala:55)
at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3$$anonfun$apply$1.apply(WriteToDataSourceV2.scala:130)
at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3$$anonfun$apply$1.apply(WriteToDataSourceV2.scala:130)
at scala.collection.Iterator$class.foreach(Iterator.scala:893)
at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$10$$anon$1.foreach(WholeStageCodegenExec.scala:612)
at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3.apply(WriteToDataSourceV2.scala:130)
at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3.apply(WriteToDataSourceV2.scala:129)
at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1411)
at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$.run(WriteToDataSourceV2.scala:135)
at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec$$anonfun$2.apply(WriteToDataSourceV2.scala:79)
at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec$$anonfun$2.apply(WriteToDataSourceV2.scala:78)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
at org.apache.spark.scheduler.Task.run(Task.scala:109)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
19/09/20 22:29:25 ERROR Utils: Aborting task
org.apache.rocketmq.client.exception.MQClientException: producerGroup can not equal DEFAULT_PRODUCER, please specify another one.
For more information, please visit the url, http://rocketmq.apache.org/docs/faq/
at org.apache.rocketmq.client.impl.producer.DefaultMQProducerImpl.checkConfig(DefaultMQProducerImpl.java:224)
at org.apache.rocketmq.client.impl.producer.DefaultMQProducerImpl.start(DefaultMQProducerImpl.java:176)
at org.apache.rocketmq.client.impl.producer.DefaultMQProducerImpl.start(DefaultMQProducerImpl.java:168)
at org.apache.rocketmq.client.producer.DefaultMQProducer.start(DefaultMQProducer.java:231)
at org.apache.spark.sql.rocketmq540.CachedRocketMqProducer$.org$apache$spark$sql$rocketmq540$CachedRocketMqProducer$$createRocketMqProducer(CachedRocketMqProducer.scala:59)
at org.apache.spark.sql.rocketmq540.CachedRocketMqProducer$$anon$1.load(CachedRocketMqProducer.scala:36)
at org.apache.spark.sql.rocketmq540.CachedRocketMqProducer$$anon$1.load(CachedRocketMqProducer.scala:33)
at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3524)
at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2317)
at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2280)
at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2195)
at com.google.common.cache.LocalCache.get(LocalCache.java:3934)
at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3938)
at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4821)
at org.apache.spark.sql.rocketmq540.CachedRocketMqProducer$.getOrCreate(CachedRocketMqProducer.scala:71)
at org.apache.spark.sql.rocketmq540.RocKetMqStreamDataWriter.producer$lzycompute(RocketStreamWriter.scala:60)
at org.apache.spark.sql.rocketmq540.RocKetMqStreamDataWriter.producer(RocketStreamWriter.scala:60)
at org.apache.spark.sql.rocketmq540.RocKetMqStreamDataWriter.write(RocketStreamWriter.scala:65)
at org.apache.spark.sql.rocketmq540.RocKetMqStreamDataWriter.write(RocketStreamWriter.scala:55)
at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3$$anonfun$apply$1.apply(WriteToDataSourceV2.scala:130)
at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3$$anonfun$apply$1.apply(WriteToDataSourceV2.scala:130)
at scala.collection.Iterator$class.foreach(Iterator.scala:893)
at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$10$$anon$1.foreach(WholeStageCodegenExec.scala:612)
at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3.apply(WriteToDataSourceV2.scala:130)
at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$$anonfun$run$3.apply(WriteToDataSourceV2.scala:129)
at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1411)
at org.apache.spark.sql.execution.datasources.v2.DataWritingSparkTask$.run(WriteToDataSourceV2.scala:135)
at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec$$anonfun$2.apply(WriteToDataSourceV2.scala:79)
at org.apache.spark.sql.execution.datasources.v2.WriteToDataSourceV2Exec$$anonfun$2.apply(WriteToDataSourceV2.scala:78)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
at org.apache.spark.scheduler.Task.run(Task.scala:109)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
19/09/20 22:29:25 ERROR Utils: Aborting task
后来发现原因是我没有指定 producerGroup(生产者组),导致 producer 使用了默认的 DEFAULT_PRODUCER 而被客户端拒绝。修复后的代码如下:
/**
 * Builds and starts a RocketMQ [[Producer]] from the given configuration map.
 *
 * Falls back to "default_group" / "localhost:9876" when the producer group or
 * name-server address keys are absent, so the producer never starts with the
 * rejected DEFAULT_PRODUCER group.
 */
private def createRocketMqProducer(producerConfiguration: ju.Map[String, Object]): Producer = {
  val groupName =
    producerConfiguration.getOrDefault(RocketMQSourceProvider.PRODUCER_GROUP, "default_group").toString
  val nameServer =
    producerConfiguration.getOrDefault(RocketMQSourceProvider.NAMESRV_ADDR, "localhost:9876").toString
  val producer: Producer = new Producer(groupName)
  // The next two lines were originally missing — without them the producer
  // failed to start with "producerGroup can not equal DEFAULT_PRODUCER".
  producer.setNamesrvAddr(nameServer)
  producer.start()
  logDebug(s"Created a new instance of rocketMqProducer for $producerConfiguration.")
  producer
}