Using Kafka from Python with kafka-python: learning the producer

1. Install kafka-python

pip install kafka-python

1.1 Producer

from kafka import KafkaProducer
from kafka.errors import KafkaError
import json
import logging
import msgpack  # requires: pip install msgpack

log = logging.getLogger(__name__)

# create a producer instance
producer = KafkaProducer(bootstrap_servers=['localhost:9092'])

# asynchronously send one record: topic and value, no key
future = producer.send('test', b'123456')

# block to get the metadata of the sent record
try:
    record_metadata = future.get(timeout=10)
except KafkaError:
    log.exception("send failed")
    pass

# topic, partition and offset of the sent record
print(record_metadata.topic)
print(record_metadata.partition)
print(record_metadata.offset)

# produce keyed messages to enable hashed partitioning
producer.send('test', key=b'foo', value=b'bar')

# serialize values with msgpack
producer = KafkaProducer(value_serializer=msgpack.dumps)
producer.send('test', {'key': 'value'})

# produce JSON-encoded values
producer = KafkaProducer(value_serializer=lambda m: json.dumps(m).encode('ascii'))
producer.send('test', {'key': 'value'})

# asynchronously send 100 records
for _ in range(100):
    producer.send('test', b'msg')

# callback invoked when a send succeeds
def on_send_success(record_metadata):
    print(record_metadata.topic)
    print(record_metadata.partition)
    print(record_metadata.offset)

# errback invoked when a send fails
def on_send_error(excp):
    log.error('I am an errback', exc_info=excp)
    # handle exception

# produce asynchronously with callbacks
producer.send('test', b'raw_bytes').add_callback(on_send_success).add_errback(on_send_error)

# block until all asynchronously sent records are delivered
producer.flush()

# configure the number of retries
producer = KafkaProducer(retries=5)

1.2 KafkaProducer constructor parameters:

    1. bootstrap_servers: a Kafka broker or list of brokers; it does not have to include every broker in the cluster. Format: 'host[:port]'. Default: localhost:9092
    1. client_id (str): client id. Default: 'kafka-python-{version}'
    1. key_serializer (callable): function used to serialize keys
    1. value_serializer (callable): function used to serialize values
    1. acks (0, 1, 'all'): 0 = do not wait for any acknowledgment from the broker; 1 = wait for the leader to write the record; 'all' = wait for all in-sync replicas to acknowledge
    1. compression_type (str): compression codec for the data: 'gzip', 'snappy', 'lz4', or None
    1. retries (int): number of times a failed send is retried
    1. batch_size (int): maximum size in bytes of the batch buffered per partition before it is sent (default 16384)
    1. linger_ms (int): how long to wait for more records to fill a batch before sending it (default 0); see the sketch after this list
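
A minimal sketch combining several of these parameters; the broker address, client id, and topic name below are placeholders, not values from the original article:

from kafka import KafkaProducer

# a producer tuned for batching: buffer up to 32 KB per partition, wait up to
# 50 ms for a batch to fill, compress with gzip, and wait for the leader's ack
producer = KafkaProducer(
    bootstrap_servers='localhost:9092',  # placeholder broker address
    client_id='demo-producer',           # placeholder client id
    acks=1,
    compression_type='gzip',
    retries=5,
    batch_size=32 * 1024,
    linger_ms=50,
)
producer.send('test', b'hello')  # 'test' is a placeholder topic
producer.flush()
producer.close()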

1.3 KafkaProducer methods

    1. close(timeout=None): close the producer
    1. flush(timeout=None): block until all buffered (asynchronously sent) records are delivered
    1. metrics(raw=False): return producer performance metrics
    1. partitions_for(topic): return the set of partitions for a topic
    1. send(topic, value=None, key=None, partition=None, timestamp_ms=None): send a record; see the short example after this list
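
A short sketch exercising these methods; 'localhost:9092' and 'test' are again placeholder values:

import time
from kafka import KafkaProducer

producer = KafkaProducer(bootstrap_servers='localhost:9092')  # placeholder broker address

# which partitions does the topic have? (useful before choosing partition= yourself)
print(producer.partitions_for('test'))

# send to an explicit partition with an explicit timestamp (milliseconds since epoch)
producer.send('test', value=b'pinned', partition=0,
              timestamp_ms=int(time.time() * 1000))

# drain everything still buffered, then inspect performance metrics
producer.flush(timeout=30)
print(producer.metrics())

# close sockets and background threads
producer.close(timeout=10)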

1.4 A KafkaProducer helper class

# -*- coding: utf-8 -*-

"""
Created by Mr.Hao on 2019/10/18.

"""
import sys
import json
import traceback
from message import topics
from utils import get_logger
from kafka import KafkaProducer
from kafka.errors import KafkaError

logger = get_logger("ProductorForKFK")

class ProductorForKFK(object):

    # topic this helper writes to (referenced below as self._MESSAGE_NAME)
    _MESSAGE_NAME = 'item_resource_index_test'

    def __init__(self, kafkahost, client_id):
        self.kafkaHost = kafkahost
        self.client_id = client_id
        self.producer = KafkaProducer(
            bootstrap_servers=kafkahost,
            # compression codec used on the wire
            compression_type="gzip",
            # maximum size of a single request (20 MB here)
            max_request_size=1024 * 1024 * 20,
            client_id=self.client_id,
            # number of retries on a failed send
            retries=3
        )

    def send(self, msg, partition=None, times=0):

        if times > 3:
            logger.error('topic %s: message dropped after 3 retries: %s', self._MESSAGE_NAME, msg)
            return False

        try:
            producer = self.producer
            # send to the configured topic (asynchronous, does not block)
            record_metadata = producer.send(self._MESSAGE_NAME, value=msg, partition=partition)
            # get the send result (blocking); with no timeout it would wait indefinitely
            record_metadata = record_metadata.get(timeout=60)
            logger.info("*"*100)
            logger.info("writer msg partition: %s", record_metadata.partition)
            logger.info("writer msg offset: %s", record_metadata.offset)
            logger.info("writer msg: %s", msg)
            producer.flush()
        except KafkaError as e:
            t, v, tb = sys.exc_info()
            logger.error("send msg ext has error, please check: %s, %s, %s", t, v, traceback.format_tb(tb))

            # retry: keep the original partition and increment the attempt counter
            return self.send(msg, partition=partition, times=times + 1)

    def json_to_str(self, msg):
        # Python 3 json.dumps has no "encoding" argument; ensure_ascii=False keeps non-ASCII text readable
        return json.dumps(msg, ensure_ascii=False)

    @staticmethod
    def str_to_json(msg):
        return json.loads(msg)
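
A hypothetical usage sketch for the helper class above; the broker address and client id are placeholders, and records go to whatever topic _MESSAGE_NAME is set to:

if __name__ == '__main__':
    # placeholder broker address and client id
    kfk = ProductorForKFK(kafkahost='localhost:9092', client_id='demo-writer')

    # serialize a dict to JSON, then send it as the record value (encoded to bytes,
    # since this producer has no value_serializer configured)
    payload = kfk.json_to_str({'id': 1, 'name': 'demo'})
    kfk.send(payload.encode('utf-8'))

    # release the underlying producer when done
    kfk.producer.close()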
