go get github.com/Shopify/sarama
package main
import (
"fmt"
"log"
"os"
"time"
"github.com/Shopify/sarama"
)
// brokerAddress lists the Kafka broker endpoints the sync-producer demo
// connects to. (Renamed from the misspelled "Adddress"; it is only used
// inside this file's main.)
var brokerAddress = []string{"192.168.74.138:9092"}

// main runs the synchronous-producer demo against the configured brokers.
func main() {
	syncProducer(brokerAddress)
}
// syncProducer demonstrates the synchronous message mode: it sends ten
// messages to topic "topic1", one every two seconds, and prints the
// partition and offset assigned to each delivered message.
func syncProducer(address []string) {
	// Configuration.
	config := sarama.NewConfig()
	// Required for SyncProducer: SendMessage blocks until the delivery
	// result arrives, so the Successes channel must be enabled.
	config.Producer.Return.Successes = true
	config.Producer.Timeout = 5 * time.Second

	// Create the producer.
	p, err := sarama.NewSyncProducer(address, config)
	if err != nil {
		log.Printf("sarama.NewSyncProducer err, message = %s", err)
		return
	}
	// Release the producer's network resources when done.
	defer p.Close()

	// Topic name and message template.
	topic := "topic1"
	srcValue := "sync: this is message. index = %d"
	// Send messages in a loop.
	for i := 0; i < 10; i++ {
		value := fmt.Sprintf(srcValue, i)
		msg := &sarama.ProducerMessage{
			Topic: topic,
			Value: sarama.ByteEncoder(value),
		}
		part, offset, err := p.SendMessage(msg)
		if err != nil {
			log.Printf("send message(%s) err = %s\n", value, err)
		} else {
			// Fixed: use a constant format string. Passing value itself as
			// the format (value+"发送成功...") would misinterpret any '%'
			// the message contains (flagged by go vet's printf check).
			fmt.Fprintf(os.Stdout, "%s发送成功, partition=%d, offset=%d\n", value, part, offset)
		}
		// Throttle: one message every two seconds.
		time.Sleep(2 * time.Second)
	}
}
运行结果:
[Running] go run "e:\golang开发学习\kafka-golang-client\producer.go"
sync: this is message. index = 0发送成功, partition=0, offset=0
sync: this is message. index = 1发送成功, partition=0, offset=1
sync: this is message. index = 2发送成功, partition=0, offset=2
sync: this is message. index = 3发送成功, partition=0, offset=3
sync: this is message. index = 4发送成功, partition=0, offset=4
sync: this is message. index = 5发送成功, partition=0, offset=5
sync: this is message. index = 6发送成功, partition=0, offset=6
sync: this is message. index = 7发送成功, partition=0, offset=7
sync: this is message. index = 8发送成功, partition=0, offset=8
sync: this is message. index = 9发送成功, partition=0, offset=9
[Done] exited with code=0 in 29.595 seconds
Kafka查看:
// asyncProducer1 (goroutine variant): asynchronous producer that drains the
// Successes and Errors channels in two dedicated goroutines while the main
// loop feeds Input(). A SIGINT (Ctrl-C) triggers a graceful shutdown and a
// final summary of enqueued/succeeded/failed counts.
func asyncProducer1(address []string) {
	config := sarama.NewConfig()
	// Enable success reports; without this the Successes channel stays silent.
	config.Producer.Return.Successes = true
	// config.Producer.Partitioner: defaults to a hash of the message key.
	p, err := sarama.NewAsyncProducer(address, config)
	if err != nil {
		// Fixed: the original message wrongly named NewSyncProducer here.
		log.Printf("sarama.NewAsyncProducer err, message=%s \n", err)
		return
	}
	// Trap SIGINT to trigger a graceful shutdown.
	signals := make(chan os.Signal, 1)
	signal.Notify(signals, os.Interrupt)
	var wg sync.WaitGroup
	var enqueued, successes, errors int
	wg.Add(2) // one goroutine per result channel
	// Count successfully delivered messages.
	go func() {
		defer wg.Done()
		for range p.Successes() {
			successes++
		}
	}()
	// Count and log failed messages.
	go func() {
		defer wg.Done()
		for err := range p.Errors() {
			log.Printf("%s 发送失败,err:%s\n", err.Msg, err.Err)
			errors++
		}
	}()
	// Produce messages in a loop until interrupted.
	asrcValue := "async-goroutine: this is a message. index=%d"
	var i int
Loop:
	for {
		i++
		value := fmt.Sprintf(asrcValue, i)
		msg := &sarama.ProducerMessage{
			Topic: "test",
			Value: sarama.ByteEncoder(value),
		}
		select {
		case p.Input() <- msg: // enqueue the message
			enqueued++
			fmt.Fprintln(os.Stdout, value)
		case <-signals: // interrupt: shut down gracefully
			p.AsyncClose()
			break Loop
		}
		time.Sleep(2 * time.Second)
	}
	// AsyncClose closes Successes/Errors once in-flight messages settle,
	// letting both counter goroutines finish before we print the summary.
	wg.Wait()
	fmt.Fprintf(os.Stdout, "发送数=%d,发送成功数=%d,发送失败数=%d \n", enqueued, successes, errors)
}
// asyncProducer2 (select variant): sends messages and handles error counting
// in the same goroutine via a single select.
// This approach is less efficient: under heavy sending the success/error
// cases can starve and back up for some time. That can be avoided by setting
// config.Producer.Return.Successes=false and config.Producer.Return.Errors=false.
func asyncProducer2(address []string) {
	config := sarama.NewConfig()
	config.Producer.Return.Errors = true
	p, err := sarama.NewAsyncProducer(address, config)
	if err != nil {
		// Fixed: the original message wrongly named NewSyncProducer here.
		log.Printf("sarama.NewAsyncProducer err, message=%s \n", err)
		return
	}
	// Trap SIGINT to trigger a graceful shutdown.
	signals := make(chan os.Signal, 1)
	signal.Notify(signals, os.Interrupt)
	var enqueued, successes, errors int
	asrcValue := "async-select: this is a message. index=%d"
	var i int
Loop:
	for {
		i++
		value := fmt.Sprintf(asrcValue, i)
		msg := &sarama.ProducerMessage{
			Topic: "test",
			Value: sarama.ByteEncoder(value),
		}
		select {
		case p.Input() <- msg:
			fmt.Fprintln(os.Stdout, value)
			enqueued++
		case <-p.Successes():
			// NOTE(review): Return.Successes is left at its default (false),
			// so this case never fires and successes stays 0.
			successes++
		case err := <-p.Errors():
			log.Printf("%s 发送失败,err:%s\n", err.Msg, err.Err)
			errors++
		case <-signals:
			p.AsyncClose()
			break Loop
		}
		time.Sleep(2 * time.Second)
	}
	fmt.Fprintf(os.Stdout, "发送数=%d,发送失败数=%d \n", enqueued, errors)
}
go get github.com/Shopify/sarama
package main
import (
"fmt"
"time"
"github.com/Shopify/sarama"
cluster "github.com/bsm/sarama-cluster"
)
var (
	// kafkaConsumer is the shared consumer-group client created in init.
	kafkaConsumer *cluster.Consumer
	// kafkaBrokers lists the Kafka broker endpoints to connect to.
	kafkaBrokers = []string{"192.168.74.138:9092"}
	// kafkaTopic is the topic this demo consumes from.
	kafkaTopic = "topic1"
	// groupId is the consumer-group id used for offset tracking.
	groupId = "test_1"
)
// init builds the cluster-consumer configuration and creates the consumer
// group before main runs. It panics on any failure so the demo fails fast.
func init() {
	// Configuration.
	var err error
	config := cluster.NewConfig()
	config.Consumer.Return.Errors = true
	// Deliver rebalance notifications on the Notifications channel.
	// (The original set this twice; once is enough.)
	config.Group.Return.Notifications = true
	config.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategyRange
	// sarama.OffsetOldest (-2): start from the oldest available message
	// when the group has no committed offset yet. Replaces the magic -2.
	config.Consumer.Offsets.Initial = sarama.OffsetOldest
	config.Consumer.Offsets.CommitInterval = 1 * time.Second
	// Create the consumer.
	kafkaConsumer, err = cluster.NewConsumer(kafkaBrokers, groupId, []string{kafkaTopic}, config)
	if err != nil {
		panic(err.Error())
	}
	if kafkaConsumer == nil {
		panic(fmt.Sprintf("consumer is nil. kafka info -> {brokers: %v, topic: %v, group: %v}", kafkaBrokers, kafkaTopic, groupId))
	}
	// Fixed: the original used fmt.Println with printf verbs, which printed
	// the literal "%v" placeholders instead of the values.
	fmt.Printf("kafka init success, consumer -> %v, topic -> %v\n", kafkaConsumer, kafkaTopic)
}
// main consumes messages, errors, and rebalance notifications from the
// consumer created in init, marking each message's offset after handling it.
func main() {
	for {
		select {
		case msg, ok := <-kafkaConsumer.Messages():
			if !ok {
				// The Messages channel closed: the consumer is shutting
				// down. Return instead of busy-spinning on a closed channel
				// (the original looped forever printing the failure line).
				fmt.Println("kafka 监听服务失败")
				return
			}
			// Fixed: fmt.Println does not interpret %s; use Printf.
			fmt.Printf("kafka 接收到的消息: %s \n", msg.Value)
			// Mark the message processed so its offset gets committed.
			kafkaConsumer.MarkOffset(msg, "")
		case err, ok := <-kafkaConsumer.Errors():
			if ok {
				// Fixed: Println -> Printf so %v is actually substituted.
				fmt.Printf("consumer err : %v\n", err)
			}
		case ntf, ok := <-kafkaConsumer.Notifications():
			if ok {
				// Fixed: Println -> Printf so %v is actually substituted.
				fmt.Printf("consumer notification : %v\n", ntf)
			}
		}
	}
}