Kafka批量消息发送

1、pom.xml

xml
<!-- 引入spring-kafka的依赖 -->
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>

2、application.properties 配置

properties
# Kafka 配置参数
ecp.sim.kafka.topic=sim-topic

# Spring Kafka Producer 批量发送配置
spring.kafka.bootstrap-servers=192.168.0.191:9092,192.168.0.192:9092,192.168.0.193:9092
spring.kafka.producer.retries=3
spring.kafka.producer.batch-size=16384
spring.kafka.producer.buffer-memory=33554432
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.acks=1

# Spring Kafka Consumer 批量消费配置
spring.kafka.consumer.group-id=simulator-group
spring.kafka.consumer.enable-auto-commit=false
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.max-poll-records=500
spring.kafka.listener.type=batch
spring.kafka.listener.ack-mode=manual

3、生产者和消费者

3.1、生产者

java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

import java.util.List;
import java.util.UUID;

/**
 * Kafka producer supporting single and batch message sending.
 *
 * <p>Each message is keyed with a freshly generated UUID. Sends are asynchronous;
 * failures reported by the returned future are logged (the original code ignored
 * the future, silently dropping any send failure).
 */
@Service
public class KafkaProducerImpl {

    private static final Logger logger = LoggerFactory.getLogger(KafkaProducerImpl.class);

    private final KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Constructor injection: keeps the dependency explicit and the field final.
     * Spring autowires a single constructor without an {@code @Autowired} annotation.
     *
     * @param kafkaTemplate template used for all sends
     */
    public KafkaProducerImpl(KafkaTemplate<String, String> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    /**
     * 发送单条消息 (send a single message).
     *
     * @param topic 目标 topic
     * @param message 消息内容
     */
    public void sendMessage(String topic, String message) {
        String messageId = nextMessageId();
        logger.info("发送单条消息, topic: {}, messageId: {}, message: {}", topic, messageId, message);
        doSend(topic, messageId, message);
    }

    /**
     * 批量发送消息 (send a batch of messages). A null or empty list is a no-op.
     *
     * @param topic 目标 topic
     * @param messages 消息列表
     */
    public void sendMessagesBatch(String topic, List<String> messages) {
        if (messages == null || messages.isEmpty()) {
            return;
        }
        logger.info("批量发送消息, topic: {}, 数量: {}", topic, messages.size());
        for (String message : messages) {
            doSend(topic, nextMessageId(), message);
        }
    }

    /**
     * Generates the message key used for partitioning.
     * TODO 消息ID需要处理一下,保证全局唯一 — a random UUID is unique in practice but
     * carries no ordering/traceability; replace with a business id if one exists.
     */
    private static String nextMessageId() {
        return UUID.randomUUID().toString();
    }

    /**
     * Sends one record and logs any asynchronous failure.
     *
     * <p>NOTE(review): {@code whenComplete} assumes spring-kafka 3.x, where
     * {@code send} returns a {@code CompletableFuture}; on 2.x use
     * {@code ListenableFuture#addCallback} instead — confirm the project version.
     */
    private void doSend(String topic, String messageId, String message) {
        kafkaTemplate.send(topic, messageId, message).whenComplete((result, ex) -> {
            if (ex != null) {
                // Without this callback, broker-side failures vanish silently.
                logger.error("消息发送失败, topic: {}, messageId: {}", topic, messageId, ex);
            }
        });
    }
}

3.2、消费者

java
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Service;

import java.util.List;

/**
 * Kafka consumer supporting batch consumption with manual offset commits.
 *
 * <p>Requires {@code spring.kafka.listener.type=batch} and
 * {@code spring.kafka.listener.ack-mode=manual} (see application.properties).
 */
@Service
public class KafkaConsumerImpl {

    private static final Logger logger = LoggerFactory.getLogger(KafkaConsumerImpl.class);

    /**
     * 批量消费消息 (consume a batch of messages).
     *
     * <p>The offset is acknowledged only after every record in the batch has been
     * processed. On failure the exception is rethrown instead of swallowed: with
     * manual ack-mode, catching the exception without ack (or nack) would leave
     * the consumer's in-memory position past the batch, silently skipping it
     * until a rebalance. Rethrowing lets the container's error handler
     * (e.g. DefaultErrorHandler) retry or route to a dead-letter topic.
     *
     * @param records 消息记录列表
     * @param acknowledgment 手动提交 ack
     */
    @KafkaListener(topics = "${ecp.sim.kafka.topic:sim-topic}", containerFactory = "kafkaListenerContainerFactory")
    public void consumeBatch(List<ConsumerRecord<String, String>> records, Acknowledgment acknowledgment) {
        if (records == null || records.isEmpty()) {
            return;
        }
        logger.info("开始批量消费消息, 本批次数量: {}", records.size());

        try {
            for (ConsumerRecord<String, String> record : records) {
                handleRecord(record);
            }
        } catch (Exception e) {
            logger.error("批量消费消息时发生异常", e);
            // Propagate so the listener container can apply its retry/DLT policy;
            // the original cause is preserved for diagnostics.
            throw new IllegalStateException("批量消费消息失败", e);
        }

        // Reached only when the whole batch succeeded.
        if (acknowledgment != null) {
            acknowledgment.acknowledge();
            logger.info("本批次消息消费成功, offset 已提交");
        }
    }

    /** Processes a single record; business logic goes here. */
    private void handleRecord(ConsumerRecord<String, String> record) {
        logger.info("消费单条消息: topic = {}, partition = {}, offset = {}, key = {}, value = {}",
                record.topic(), record.partition(), record.offset(), record.key(), record.value());
        // TODO: 添加实际的业务处理逻辑
    }
}
相关推荐
wangchunting1 天前
Jvm-垃圾收集器
java·开发语言·jvm
weixin_464307631 天前
QT智能指针
java·数据库·qt
架构师沉默1 天前
程序员如何避免猝死?
java·后端·架构
Zzxy1 天前
快速搭建SpringBoot项目并整合MyBatis-Plus
java·spring boot
星如雨グッ!(๑•̀ㅂ•́)و✧1 天前
WebFlux onErrorContinue 和 onErrorResume使用详解
java·人工智能
电商API&Tina1 天前
电商数据采集API接口||合规优先、稳定高效、数据精准
java·javascript·数据库·python·json
guoji77881 天前
ChatGPT镜像站实战:从零设计高可用分布式任务调度系统
分布式·chatgpt
zdl6861 天前
Spring Boot文件上传
java·spring boot·后端
世界哪有真情1 天前
哇!绝了!原来这么简单!我的 Java 项目代码终于被 “拯救” 了!
java·后端
RMB Player1 天前
Spring Boot 集成飞书推送超详细教程:文本消息、签名校验、封装工具类一篇搞定
java·网络·spring boot·后端·spring·飞书