1. 引入依赖
xml
<!-- Apache Kafka Java client (producer/consumer API) -->
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>3.0.1</version>
</dependency>
2. yaml配置信息
yaml
# Fixed: the snippet had lost all nesting, which makes it invalid as a
# Spring Boot configuration tree; restored the intended hierarchy and
# normalized the mixed camelCase/kebab-case keys to kebab-case
# (Spring's relaxed binding accepts either, but consistency aids readability).
commons:
  kafka:
    enabled: true # whether the Kafka integration is enabled
    bootstrap-servers: 127.0.0.1:9092
    producer:
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    consumer:
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      group-id: msg-group # consumer group
      auto-offset-reset: earliest
3. 配置类
java
/**
 * Binds the {@code commons.kafka.*} configuration tree.
 *
 * Fixed: removed the dead field {@code bootstrapServers111} — a debug
 * leftover that nothing in the codebase reads.
 */
@Data
@Configuration
@ConfigurationProperties(prefix = "commons.kafka")
public class CommonsKafkaProperties {

    /** Kafka broker list, e.g. {@code 127.0.0.1:9092}. */
    private String bootstrapServers;

    /** Producer-side settings ({@code commons.kafka.producer.*}). */
    private Producer producer;

    /** Consumer-side settings ({@code commons.kafka.consumer.*}). */
    private Consumer consumer;

    @Data
    public static class Producer {
        /** Fully-qualified serializer class for record keys. */
        private String keySerializer;
        /** Fully-qualified serializer class for record values. */
        private String valueSerializer;
        // Pass-through bag for any extra producer properties not modeled above.
        private Properties props = new Properties();
    }

    @Data
    public static class Consumer {
        /** Fully-qualified deserializer class for record keys. */
        private String keyDeserializer;
        /** Fully-qualified deserializer class for record values. */
        private String valueDeserializer;
        /** Offset-reset policy when no committed offset exists (e.g. "earliest"). */
        private String autoOffsetReset;
        /** Consumer group id. */
        private String groupId;
        // Pass-through bag for any extra consumer properties not modeled above.
        private Properties props = new Properties();
    }
}
4. kafka生产者消费者配置类
java
/**
 * Builds the shared Kafka producer and consumer beans from
 * {@link CommonsKafkaProperties}.
 *
 * Fixed: the class carried no Spring annotation at all, so its @Bean
 * methods were never registered — added @Configuration.
 */
@Configuration
public class KafkaConfiguration {

    @Resource
    private CommonsKafkaProperties commonsKafkaProperties;

    /**
     * Shared String/String producer, created only when commons.kafka.enabled=true.
     */
    @Bean
    @ConditionalOnProperty(value = "commons.kafka.enabled", havingValue = "true")
    public KafkaProducer<String, String> kafkaProducer() {
        Properties props = new Properties();
        props.put("bootstrap.servers", commonsKafkaProperties.getBootstrapServers());
        props.put("key.serializer", commonsKafkaProperties.getProducer().getKeySerializer());
        props.put("value.serializer", commonsKafkaProperties.getProducer().getValueSerializer());
        // Extra unmodeled settings deliberately override the defaults above.
        props.putAll(commonsKafkaProperties.getProducer().getProps());
        return new KafkaProducer<>(props);
    }

    /**
     * Shared String/String consumer, created only when commons.kafka.enabled=true.
     * NOTE: KafkaConsumer is not thread-safe — poll it from a single thread only.
     */
    @Bean
    @ConditionalOnProperty(value = "commons.kafka.enabled", havingValue = "true")
    public KafkaConsumer<String, String> kafkaConsumer() {
        Properties props = new Properties();
        props.put("bootstrap.servers", commonsKafkaProperties.getBootstrapServers());
        props.put("key.deserializer", commonsKafkaProperties.getConsumer().getKeyDeserializer());
        props.put("value.deserializer", commonsKafkaProperties.getConsumer().getValueDeserializer());
        props.put("group.id", commonsKafkaProperties.getConsumer().getGroupId());
        // Fixed: honor the configured auto-offset-reset instead of hard-coding
        // "earliest"; fall back to "earliest" when the property is unset so
        // existing behavior is preserved.
        String offsetReset = commonsKafkaProperties.getConsumer().getAutoOffsetReset();
        props.put("auto.offset.reset", offsetReset != null ? offsetReset : "earliest");
        // Extra unmodeled settings deliberately override the defaults above.
        props.putAll(commonsKafkaProperties.getConsumer().getProps());
        return new KafkaConsumer<>(props);
    }
}
5. 发送消息
java
@Slf4j
@RestController
@RequestMapping("/kafkaMsg")
public class MessageController {
@Resource
private KafkaProducer<String, String> kafkaProducer;
@GetMapping( "/send")
public void sendMessage(@RequestParam String message) {
ProducerRecord<String, String> record = new ProducerRecord<>("demoEvent", message);
record.headers().add("event","TSP-W".getBytes());
log.error("sendMessage demoEvent:{}", message);
kafkaProducer.send(record);
}
}
6. 消费者
6.1. 第一种方式
- 发送消息时,设置了消息头部信息;消费时,可根据头部信息消费数据
java
/**
 * Subscribes the shared consumer to "demoEvent" and polls it on a single
 * dedicated thread (KafkaConsumer is not thread-safe, so all access stays
 * on that one thread).
 */
@Slf4j
@Service
public class ApacheKafkaListener {

    @Autowired
    private KafkaConsumer<String, String> kafkaConsumer;

    @PostConstruct
    public void startListening() {
        // Subscribe to the topic before the polling loop starts.
        kafkaConsumer.subscribe(Collections.singletonList("demoEvent"));
        new Thread(() -> {
            try {
                while (true) {
                    // Fixed: the catch used to wrap the whole loop, so a single
                    // failure permanently stopped consumption; now one bad poll
                    // or record batch is logged and polling continues.
                    try {
                        ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(100));
                        records.forEach(this::handleMessage);
                    } catch (Exception e) {
                        log.error("handle message error", e);
                    }
                }
            } finally {
                kafkaConsumer.close();
            }
        }, "demoEvent-consumer").start();
    }

    /**
     * Processes one record; only records whose "event" header equals "TSP-W"
     * are handled, everything else is ignored.
     */
    private void handleMessage(ConsumerRecord<String, String> record) {
        String eventHeader = null;
        // Fixed: lastHeader() returns null when the header is absent, which
        // used to throw NPE on .value() for records without an "event" header.
        if (record.headers() != null && record.headers().lastHeader("event") != null) {
            eventHeader = new String(record.headers().lastHeader("event").value());
        }
        if (ObjectUtils.nullSafeEquals(eventHeader, "TSP-W")) {
            String key = record.key();
            String value = record.value();
            // Fixed: routine processing logging was emitted at ERROR level.
            log.info("Received message: key = {}, value = {}, eventHeader = {}", key, value, eventHeader);
        }
    }
}
6.2. 第二种方式
- 将每个主题的消费者定义成独立的Bean,可增强代码的灵活性和可扩展性。这种做法不仅能针对各个主题进行精细化的配置,还能满足不同场景下的需求。
java
/**
 * Per-topic consumer bean for "lowLoadEvent" with its own tuning, plus a
 * background thread that polls it.
 *
 * NOTE(review): havingValue = "false" means this bean only exists when
 * commons.kafka.enabled=false — presumably to avoid an ambiguous second
 * KafkaConsumer bean next to KafkaConfiguration#kafkaConsumer; confirm
 * this is intentional and not a typo for "true".
 * NOTE(review): the "io.kafka.*" keys are not kafka-clients settings; they
 * look like custom framework properties — kafka-clients will log them as
 * unknown configs. Verify which component consumes them.
 * NOTE(review): the bean is returned while the background thread polls it;
 * KafkaConsumer is not thread-safe, so injecting this bean elsewhere and
 * calling it concurrently would be unsafe.
 */
@Slf4j
@Configuration
public class ApacheKafkaConsumer {

    @Resource
    private CommonsKafkaProperties commonsKafkaProperties;

    @Bean
    @ConditionalOnProperty(value = "commons.kafka.enabled", havingValue = "false")
    public KafkaConsumer<String, String> handle() {
        Properties props = new Properties();
        props.put("key.deserializer", commonsKafkaProperties.getConsumer().getKeyDeserializer());
        props.put("value.deserializer", commonsKafkaProperties.getConsumer().getValueDeserializer());
        props.put("bootstrap.servers", commonsKafkaProperties.getBootstrapServers());
        props.put("group.id", commonsKafkaProperties.getConsumer().getGroupId());
        // Topic-specific tuning: allow large records and slow batch handling.
        props.put("max.partition.fetch.bytes", 10485760);
        props.put("max.poll.interval.ms", 600000);
        props.put("io.kafka.record.handle.threadpool.coreSize", 5);
        props.put("io.kafka.record.handle.threadpool.maxSize", 10);
        props.put("io.kafka.record.handle.threadpool.queueSize", 5);
        props.put("io.kafka.client.name", "receiveDemoEvent");
        props.put("io.kafka.record.handle.threadpool.namePrefix", "DemoEvent");
        // Extra unmodeled settings deliberately override the defaults above.
        props.putAll(commonsKafkaProperties.getConsumer().getProps());
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList("lowLoadEvent"));
        // Dedicated polling thread; the try/catch sits inside the loop so a
        // single failure is logged and consumption continues.
        new Thread(() -> {
            while (true) {
                try {
                    ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                    for (ConsumerRecord<String, String> record : records) {
                        // Fixed: routine consumption logging was emitted at ERROR level.
                        log.info("消费到消息: {}", record.value());
                    }
                } catch (Exception e) {
                    log.error("Error while consuming messages", e);
                }
            }
        }, "lowLoadEvent-consumer").start();
        return consumer;
    }
}