如何使用@KafkaListener实现从nacos中动态获取监听的topic

1、简介

对于经常需要变更kafka主题的场景,为了实现动态监听topic的功能,可以使用以下方式。

2、使用步骤
2.1、添加依赖
XML
<dependency>
   <groupId>org.springframework.kafka</groupId>
   <artifactId>spring-kafka</artifactId>
   <version>2.8.1</version>
</dependency>
2.2、nacos中配置
yaml
    # kafka 配置
spring:
  kafka:
    bootstrap-servers: ip地址:9092
    topics: topic1,topic2
    producer:
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
      enable-idempotence: true
      acks: all
      transactional-id: kafka-group
    consumer:
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      group-id: kafka-clickhouse-group
      auto-offset-reset: latest
      enable-auto-commit: false
      isolation-level: read_committed
      allow-auto-create-topics: true
    listener:
      ack-mode: MANUAL_IMMEDIATE
      concurrency: 3
2.3、配置类
java
package org.aecsi.kafkadatatock.config;

import lombok.RequiredArgsConstructor;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.ListTopicsResult;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.boot.ApplicationRunner;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
import org.springframework.kafka.core.*;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.transaction.KafkaTransactionManager;

import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;

@Configuration
@RequiredArgsConstructor
@EnableKafka
@RefreshScope
/**
 * Kafka configuration whose consumer side can be refreshed from nacos.
 *
 * <p>{@code @RefreshScope} on the consumer-related beans lets a nacos config push
 * (e.g. a changed {@code spring.kafka.topics} list) rebuild them without a restart.
 * Listeners are declared with {@code autoStartup = "false"} and started explicitly
 * by the {@link ApplicationRunner} below once the context is fully initialized.
 */
@Configuration
@RequiredArgsConstructor
@EnableKafka
@RefreshScope
public class KafkaConfig {

    /** Bound {@code spring.kafka.*} properties (sourced from nacos). */
    private final KafkaProperties kafkaProperties;

    @Bean
    public KafkaAdmin kafkaAdmin() {
        return new KafkaAdmin(kafkaProperties.buildAdminProperties());
    }

    @Bean
    public AdminClient adminClient(KafkaAdmin kafkaAdmin) {
        return AdminClient.create(kafkaAdmin.getConfigurationProperties());
    }

    /**
     * Transactional, idempotent String/String producer factory.
     * Programmatic settings intentionally override the nacos values.
     */
    @Bean
    public ProducerFactory<String, String> producerFactory() {
        Map<String, Object> configProps = new HashMap<>(kafkaProperties.buildProducerProperties());
        configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configProps.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true);
        configProps.put(ProducerConfig.ACKS_CONFIG, "all");
        configProps.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "kafka-clickhouse-producer");
        DefaultKafkaProducerFactory<String, String> factory = new DefaultKafkaProducerFactory<>(configProps);
        // The prefix set here takes precedence over TRANSACTIONAL_ID_CONFIG for
        // per-producer transactional ids created by this factory.
        factory.setTransactionIdPrefix("kafka-clickhouse-producer-");
        return factory;
    }

    @Bean
    public KafkaTemplate<String, String> kafkaTemplate(ProducerFactory<String, String> producerFactory) {
        return new KafkaTemplate<>(producerFactory);
    }

    /**
     * Consumer factory rebuilt on nacos refresh. Auto-commit is off because the
     * listener container acknowledges manually (MANUAL_IMMEDIATE below).
     */
    @Bean
    @RefreshScope
    public ConsumerFactory<String, String> consumerFactory() {
        Map<String, Object> configProps = new HashMap<>(kafkaProperties.buildConsumerProperties());
        configProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        configProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        configProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        configProps.put(ConsumerConfig.ALLOW_AUTO_CREATE_TOPICS_CONFIG, true);
        configProps.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");
        return new DefaultKafkaConsumerFactory<>(configProps);
    }

    @Bean
    public KafkaTransactionManager<String, String> transactionManager(ProducerFactory<String, String> producerFactory) {
        return new KafkaTransactionManager<>(producerFactory);
    }

    /**
     * Single listener container factory.
     *
     * <p>NOTE: the original code declared two {@code @Bean} methods with this name;
     * Spring derives the bean name from the method name, so the second definition
     * silently replaced the first and dropped the MANUAL_IMMEDIATE ack mode and the
     * transaction manager. They are merged into one definition here.
     */
    @Bean
    @RefreshScope
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory(
            ConsumerFactory<String, String> consumerFactory,
            KafkaTransactionManager<String, String> transactionManager) {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory);
        factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE);
        factory.getContainerProperties().setTransactionManager(transactionManager);
        factory.setAutoStartup(true);
        return factory;
    }

    /**
     * Starts all registered Kafka listener containers after application startup
     * (listeners are declared with {@code autoStartup = "false"}).
     */
    @Bean
    public ApplicationRunner kafkaListenerStarter(KafkaListenerEndpointRegistry registry) {
        return args -> registry.start();
    }
}

接收消息类

java
    /**
     * Consumes records from the topics listed in {@code spring.kafka.topics}
     * (comma-separated, resolved via SpEL when the container is created — a nacos
     * refresh that rebuilds the container picks up the new topic list).
     *
     * <p>Runs inside a Kafka transaction; the offset is acknowledged manually only
     * after successful processing (MANUAL_IMMEDIATE ack mode). Rethrowing on failure
     * rolls the transaction back and leaves the record unacknowledged for redelivery.
     */
    @KafkaListener(topics = "#{'${spring.kafka.topics}'.split(',')}", autoStartup = "false")
    @Transactional(transactionManager = "transactionManager")
    public void processMessage(ConsumerRecord<String, String> record,
                               Acknowledgment acknowledgment,
                               @Header(KafkaHeaders.RECEIVED_TOPIC) String topic,
                               @Header(KafkaHeaders.RECEIVED_TIMESTAMP) long timestamp) {
        try {
            log.info("kafka 接受 topic: {} 消息", topic);
            // process the message here
            acknowledgment.acknowledge();
        } catch (Exception e) {
            // Pass the throwable as the last argument so SLF4J logs the full stack
            // trace; the original logged only e.getMessage() and lost the root cause.
            log.error("Error processing message for topic {}", topic, e);
            throw e;
        }
    }

主启动类添加一个注解

java 复制代码
@EnableConfigurationProperties(KafkaProperties.class)
3、总结

实现kafka动态获取topic还有其他方式,博主目前只验证这一种,其他方式待更新。

相关推荐
南客先生3 小时前
互联网大厂Java面试:RocketMQ、RabbitMQ与Kafka的深度解析
java·面试·kafka·rabbitmq·rocketmq·消息中间件
淋一遍下雨天5 小时前
Spark-Streaming核心编程
大数据·kafka
樟小叶_公众号同名8 小时前
Kafka运维经验
后端·kafka
小名叫咸菜10 小时前
flume整合Kafka和spark-streaming核心编程
kafka·flume
什么芮.11 小时前
spark-streaming
pytorch·sql·spark·kafka·scala
桑榆080612 小时前
Kafka简介
spark·kafka
xmyLydia14 小时前
🚀 封装通用线程池 + Prometheus 可视化任务耗时与成功率(实战记录)
kafka·数据可视化
K8sCat14 小时前
Golang与Kafka的五大核心设计模式
后端·kafka·go
企鹅不耐热.15 小时前
KafkaSpark-Streaming
kafka