1. Docker 快速搭建
bash
# docker-compose.yml
version: '3.8'
services:
  zookeeper:
    image: confluentinc/cp-zookeeper:7.5.0
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181   # 必填，否则容器启动失败
    ports:
      - "2181:2181"
  kafka:
    image: confluentinc/cp-kafka:7.5.0
    depends_on:
      - zookeeper
    ports:
      - "9092:9092"
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://localhost:9092
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1   # 单 broker 必须设为 1
bash
docker-compose up -d
2. Spring Boot 集成
2.1 添加依赖
xml
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
2.2 配置
yaml
spring:
  kafka:
    bootstrap-servers: localhost:9092
    producer:
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    consumer:
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      group-id: my-group
      auto-offset-reset: earliest
3. 生产者
java
@Service
public class KafkaProducerService {

    // 原示例直接使用了未声明的 log（也没有 @Slf4j），这里显式声明。
    private static final Logger log = LoggerFactory.getLogger(KafkaProducerService.class);

    private final KafkaTemplate<String, String> kafkaTemplate;

    // 构造器注入优于字段 @Autowired：依赖可为 final，脱离容器也能测试。
    public KafkaProducerService(KafkaTemplate<String, String> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    /**
     * 发送无 key 消息（分区由默认分区器选择），并异步记录发送结果。
     */
    public void sendMessage(String topic, String message) {
        // Spring Kafka 3.x 起 send() 返回 CompletableFuture，
        // 2.x 的 ListenableFuture.addCallback(...) 已被移除。
        kafkaTemplate.send(topic, message).whenComplete((result, ex) -> {
            if (ex == null) {
                log.info("发送成功");
            } else {
                log.error("发送失败", ex);
            }
        });
    }

    /**
     * 发送带 key 的消息：相同 key 总是落到同一分区，从而保证按 key 的顺序性。
     */
    public void sendWithKey(String topic, String key, String message) {
        kafkaTemplate.send(topic, key, message);
    }
}
4. 消费者
java
@Service
public class KafkaConsumerService {

    // 原示例直接使用了未声明的 log（也没有 @Slf4j），这里显式声明。
    private static final Logger log = LoggerFactory.getLogger(KafkaConsumerService.class);

    /**
     * 基础消费：单线程监听 my-topic。
     */
    @KafkaListener(topics = "my-topic", groupId = "my-group")
    public void consume(ConsumerRecord<String, String> record) {
        log.info("收到消息: {}", record.value());
    }

    /**
     * 并发消费示例：concurrency = "3" 启动 3 个消费者线程。
     * 注意：原示例与上面的 consume 使用了相同的 topic + groupId，
     * 两个监听器会瓜分分区，每条消息只会被其中一个收到；
     * 这里改用独立的 groupId，使本监听器能独立收到全部消息。
     */
    @KafkaListener(topics = "my-topic", groupId = "my-group-concurrent", concurrency = "3")
    public void consumeMulti(ConsumerRecord<String, String> record) {
        log.info("收到消息: {}", record.value());
    }
}
5. 消息序列化
5.1 JSON 序列化
yaml
spring:
  kafka:
    producer:
      value-serializer: org.springframework.kafka.support.serializer.JsonSerializer
    consumer:
      value-deserializer: org.springframework.kafka.support.serializer.JsonDeserializer
      properties:
        spring.json.trusted.packages: com.example.*
java
// 发送对象
kafkaTemplate.send("order-topic", order);
// 消费对象
// JsonDeserializer deserializes the record value directly into an Order
// (relies on the spring.json.trusted.packages setting shown earlier).
// NOTE(review): `log` is used without a visible declaration in this
// snippet — presumably Lombok @Slf4j on the enclosing class; confirm.
@KafkaListener(topics = "order-topic")
public void consumeOrder(Order order) {
log.info("收到订单: {}", order);
}
6. 常见问题
6.1 消息丢失
yaml
spring:
  kafka:
    producer:
      acks: all # 必须等所有副本确认
    consumer:
      enable-auto-commit: false # 手动提交
    listener:
      ack-mode: manual # 否则容器不会注入 Acknowledgment 参数
java
/**
 * 手动提交消费：仅在业务处理成功后提交 offset。
 * 前提：spring.kafka.listener.ack-mode 配置为 manual / manual_immediate，
 * 仅靠 enable-auto-commit: false 不足以让容器注入 Acknowledgment。
 */
@KafkaListener(topics = "my-topic")
public void consume(ConsumerRecord<String, String> record, Acknowledgment ack) {
    try {
        processMessage(record.value());
        ack.acknowledge(); // 手动提交：处理成功后才提交 offset
    } catch (Exception e) {
        log.error("处理失败", e);
        // 失败时不能只吞掉异常：offset 未提交却也不会被重试，
        // 直到 rebalance 前消息都“卡住”。重新抛出，交给容器的
        // DefaultErrorHandler 做重试 / 转发死信队列。
        throw new IllegalStateException("消息处理失败, offset=" + record.offset(), e);
    }
}
6.2 消息重复
业务层面实现幂等:
java
/**
 * 幂等消费：以消息 key 作为去重令牌写入 Redis。
 * NOTE(review): 假设 redis 暴露 ValueOperations 风格的
 * setIfAbsent(key, value, timeout) / delete(key) —— 请对照实际 Bean 确认。
 */
@KafkaListener(topics = "order-topic")
public void consume(ConsumerRecord<String, String> record) {
    String id = record.key();
    // 1. 必须带过期时间：不设 TTL 去重 key 会在 Redis 中无限堆积；
    //    TTL 取值需覆盖消息可能被重复投递的最大窗口。
    // 2. Boolean.TRUE.equals 防止 setIfAbsent 返回 null 时拆箱 NPE。
    if (Boolean.TRUE.equals(redis.setIfAbsent(id, "1", Duration.ofHours(24)))) {
        try {
            processMessage(record.value());
        } catch (RuntimeException e) {
            // 处理失败要释放去重 key，否则这条消息被“标记已消费”却
            // 从未处理成功，重投的副本会被永久跳过。
            redis.delete(id);
            throw e;
        }
    }
}
6.3 消息顺序
相同 key 发送到相同分区:
java
kafkaTemplate.send("order-topic", orderId, message);
7. 总结
- Docker 一键搭建 Kafka 环境
- 生产者 通过 KafkaTemplate 发送消息
- 消费者 通过 @KafkaListener 监听消息
- 序列化 支持 String 和 JSON
- 可靠投递 生产者 acks=all + 消费者手动提交