1、简介
对于经常需要变更kafka主题的场景,为了实现动态监听topic的功能,可以使用以下方式。
2、使用步骤
2.1、添加依赖
XML
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
<version>2.8.1</version>
</dependency>
2.2、nacos中配置
bash
# kafka 配置
spring:
kafka:
bootstrap-servers: ip地址:9092
topics: topic1,topic2
producer:
key-serializer: org.apache.kafka.common.serialization.StringSerializer
value-serializer: org.apache.kafka.common.serialization.StringSerializer
enable-idempotence: true
acks: all
transaction-id-prefix: kafka-clickhouse-producer-
consumer:
key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
group-id: kafka-clickhouse-group
auto-offset-reset: latest
enable-auto-commit: false
isolation-level: read_committed
allow-auto-create-topics: true
listener:
ack-mode: MANUAL_IMMEDIATE
concurrency: 3
2.3、配置类
java
package org.aecsi.kafkadatatock.config;
import lombok.RequiredArgsConstructor;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.ListTopicsResult;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.boot.ApplicationRunner;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
import org.springframework.kafka.core.*;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.transaction.KafkaTransactionManager;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
@Configuration
@RequiredArgsConstructor
@EnableKafka
@RefreshScope
public class KafkaConfig {

    private final KafkaProperties kafkaProperties;

    /**
     * Admin client configuration built from the (Nacos-refreshable) Spring Kafka properties.
     * Used by Spring to create/verify topics on startup.
     */
    @Bean
    public KafkaAdmin kafkaAdmin() {
        return new KafkaAdmin(kafkaProperties.buildAdminProperties());
    }

    /** Raw AdminClient for runtime topic inspection (e.g. listing topics for dynamic listeners). */
    @Bean
    public AdminClient adminClient(KafkaAdmin kafkaAdmin) {
        return AdminClient.create(kafkaAdmin.getConfigurationProperties());
    }

    /**
     * Transactional, idempotent producer factory.
     * <p>
     * Note: the transaction id is supplied via {@link DefaultKafkaProducerFactory#setTransactionIdPrefix},
     * which takes precedence over any {@code transactional.id} placed in the config map — so we set
     * only the prefix here instead of also putting TRANSACTIONAL_ID_CONFIG (the old duplicate setting
     * was dead configuration).
     */
    @Bean
    public ProducerFactory<String, String> producerFactory() {
        Map<String, Object> configProps = new HashMap<>(kafkaProperties.buildProducerProperties());
        configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configProps.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true);
        configProps.put(ProducerConfig.ACKS_CONFIG, "all");
        DefaultKafkaProducerFactory<String, String> factory = new DefaultKafkaProducerFactory<>(configProps);
        factory.setTransactionIdPrefix("kafka-clickhouse-producer-");
        return factory;
    }

    @Bean
    public KafkaTemplate<String, String> kafkaTemplate(ProducerFactory<String, String> producerFactory) {
        return new KafkaTemplate<>(producerFactory);
    }

    /**
     * Consumer factory; {@code @RefreshScope} so a Nacos config change rebuilds it
     * with the new properties.
     */
    @Bean
    @RefreshScope
    public ConsumerFactory<String, String> consumerFactory() {
        Map<String, Object> configProps = new HashMap<>(kafkaProperties.buildConsumerProperties());
        configProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        configProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        configProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        configProps.put(ConsumerConfig.ALLOW_AUTO_CREATE_TOPICS_CONFIG, true);
        configProps.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");
        return new DefaultKafkaConsumerFactory<>(configProps);
    }

    @Bean
    public KafkaTransactionManager<String, String> transactionManager(ProducerFactory<String, String> producerFactory) {
        return new KafkaTransactionManager<>(producerFactory);
    }

    /**
     * Single listener container factory.
     * <p>
     * FIX: the original class declared TWO {@code @Bean} methods named
     * {@code kafkaListenerContainerFactory}. Spring names beans after the factory method,
     * so this is a duplicate bean definition — with bean overriding disabled (the Spring
     * Boot 2.x default) the context fails to start, and with overriding enabled one of the
     * two factories (the transactional MANUAL_IMMEDIATE one) would silently be discarded.
     * The two definitions are merged here: transactional, manual-ack, auto-start.
     */
    @Bean
    @RefreshScope
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory(
            ConsumerFactory<String, String> consumerFactory,
            KafkaTransactionManager<String, String> transactionManager) {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory);
        // Offsets are committed by the listener via Acknowledgment.acknowledge().
        factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE);
        // Consume/process/produce inside a Kafka transaction (read_committed downstream).
        factory.getContainerProperties().setTransactionManager(transactionManager);
        factory.setAutoStartup(true);
        return factory;
    }

    /**
     * Starts every registered Kafka listener after the application is fully up;
     * listeners are declared with {@code autoStartup = "false"} so refreshed
     * configuration is in place before consumption begins.
     */
    @Bean
    public ApplicationRunner kafkaListenerStarter(KafkaListenerEndpointRegistry registry) {
        return args -> {
            // Start all Kafka listeners.
            registry.start();
        };
    }
}
接收消息类
java
/**
 * Consumes records from the comma-separated topic list in {@code spring.kafka.topics}.
 * Runs inside a Kafka transaction; the offset is committed manually only after
 * successful processing. {@code autoStartup = "false"} — containers are started by
 * the {@code kafkaListenerStarter} ApplicationRunner once the context is ready.
 */
@KafkaListener(topics = "#{'${spring.kafka.topics}'.split(',')}", autoStartup = "false")
@Transactional(transactionManager = "transactionManager")
public void processMessage(ConsumerRecord<String, String> record,
                           Acknowledgment acknowledgment,
                           @Header(KafkaHeaders.RECEIVED_TOPIC) String topic,
                           @Header(KafkaHeaders.RECEIVED_TIMESTAMP) long timestamp) {
    try {
        log.info("kafka 接受 topic: {} 消息", topic);
        // Process the message here.
        acknowledgment.acknowledge();
    } catch (Exception e) {
        // FIX: pass the exception object itself (last argument) so SLF4J logs the full
        // stack trace — the original logged only e.getMessage(), losing the cause.
        log.error("Error processing message for topic {}", topic, e);
        // Rethrow so the Kafka transaction rolls back and the record is redelivered.
        throw e;
    }
}
主启动类添加一个注解
java
@EnableConfigurationProperties(KafkaProperties.class)
3、总结
实现kafka动态获取topic还有其他方式,博主目前只验证这一种,其他方式待更新。