Kafka批量消息发送
1、pom.xml
xml
<!-- 引入spring-kafka的依赖 -->
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
</dependency>
2、application.properties 配置
properties
# Kafka 配置参数
ecp.sim.kafka.topic=sim-topic
# Spring Kafka Producer 批量发送配置
spring.kafka.bootstrap-servers=192.168.0.191:9092,192.168.0.192:9092,192.168.0.193:9092
spring.kafka.producer.retries=3
spring.kafka.producer.batch-size=16384
# batch-size 只是批次大小上限;如需真正攒批发送,还需配置 linger.ms 给批次留出聚合时间(默认 0 表示立即发送)
spring.kafka.producer.properties.linger.ms=5
spring.kafka.producer.buffer-memory=33554432
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.acks=1
# Spring Kafka Consumer 批量消费配置
spring.kafka.consumer.group-id=simulator-group
spring.kafka.consumer.enable-auto-commit=false
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.max-poll-records=500
spring.kafka.listener.type=batch
spring.kafka.listener.ack-mode=manual
3、生产者和消费者
3.1、生产者
java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.UUID;
/**
 * Kafka producer service supporting single and batch message sending.
 *
 * <p>Each record is keyed with a freshly generated UUID, so partition
 * assignment is effectively random; if per-key ordering is required,
 * use a business key instead.
 */
@Service
public class KafkaProducerImpl {

    private static final Logger logger = LoggerFactory.getLogger(KafkaProducerImpl.class);

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Sends a single message to the given topic.
     *
     * @param topic   target topic
     * @param message message payload
     */
    public void sendMessage(String topic, String message) {
        // TODO 消息ID需要处理一下,保证全局唯一
        String messageId = UUID.randomUUID().toString();
        logger.info("发送单条消息, topic: {}, messageId: {}, message: {}", topic, messageId, message);
        doSend(topic, messageId, message);
    }

    /**
     * Sends a list of messages to the given topic. Relies on the producer's
     * batch-size/linger settings for actual wire-level batching.
     *
     * @param topic    target topic
     * @param messages message payloads; null or empty is a no-op
     */
    public void sendMessagesBatch(String topic, List<String> messages) {
        if (messages == null || messages.isEmpty()) {
            return;
        }
        logger.info("批量发送消息, topic: {}, 数量: {}", topic, messages.size());
        for (String message : messages) {
            // TODO 消息ID需要处理一下,保证全局唯一
            String messageId = UUID.randomUUID().toString();
            doSend(topic, messageId, message);
        }
    }

    /**
     * Sends one record and logs asynchronous send failures instead of
     * silently discarding them (send() is async; errors surface only via
     * the returned future).
     *
     * <p>NOTE(review): {@code whenComplete} assumes spring-kafka 3.x, where
     * {@code send} returns a {@code CompletableFuture}; on 2.x use
     * {@code ListenableFuture#addCallback} instead — confirm project version.
     */
    private void doSend(String topic, String messageId, String message) {
        kafkaTemplate.send(topic, messageId, message)
                .whenComplete((result, ex) -> {
                    if (ex != null) {
                        logger.error("消息发送失败, topic: {}, messageId: {}", topic, messageId, ex);
                    }
                });
    }
}
3.2、消费者
java
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Service;
import java.util.List;
/**
 * Kafka consumer service supporting batch consumption with manual offset commit
 * (requires listener.type=batch and ack-mode=manual in configuration).
 */
@Service
public class KafkaConsumerImpl {

    private static final Logger logger = LoggerFactory.getLogger(KafkaConsumerImpl.class);

    /**
     * Consumes one polled batch of records from the configured topic and
     * commits the offset manually only after the whole batch succeeds.
     *
     * @param records        records of the current poll; null/empty is a no-op
     * @param acknowledgment manual ack handle used to commit the batch offset
     * @throws RuntimeException if processing any record fails, so the container's
     *         error handler can seek back and retry (or route to a DLT)
     */
    @KafkaListener(topics = "${ecp.sim.kafka.topic:sim-topic}", containerFactory = "kafkaListenerContainerFactory")
    public void consumeBatch(List<ConsumerRecord<String, String>> records, Acknowledgment acknowledgment) {
        if (records == null || records.isEmpty()) {
            return;
        }
        logger.info("开始批量消费消息, 本批次数量: {}", records.size());
        try {
            for (ConsumerRecord<String, String> record : records) {
                logger.info("消费单条消息: topic = {}, partition = {}, offset = {}, key = {}, value = {}",
                        record.topic(), record.partition(), record.offset(), record.key(), record.value());
                // TODO: 添加实际的业务处理逻辑
            }
            // 业务处理成功后手动提交 offset
            if (acknowledgment != null) {
                acknowledgment.acknowledge();
                logger.info("本批次消息消费成功, offset 已提交");
            }
        } catch (Exception e) {
            logger.error("批量消费消息时发生异常", e);
            // Rethrow instead of swallowing: with manual ack, swallowing neither
            // commits nor retries — the consumer's in-memory position still
            // advances past the batch, effectively losing it until a rebalance.
            // Rethrowing lets the container error handler seek back and retry.
            if (e instanceof RuntimeException) {
                throw (RuntimeException) e;
            }
            throw new RuntimeException("批量消费消息时发生异常", e);
        }
    }
}