Apache Kafka使用

1. 引入依赖

xml
 <!-- Apache Kafka Java client: provides KafkaProducer / KafkaConsumer -->
 <dependency>
     <groupId>org.apache.kafka</groupId>
     <artifactId>kafka-clients</artifactId>
     <version>3.0.1</version>
 </dependency>

2. yaml配置信息

yaml
commons:
  kafka:
    enabled: true   # whether to create the Kafka beans (checked via @ConditionalOnProperty)
    bootstrap-servers: 127.0.0.1:9092
    producer:
      # Kebab-case used consistently; Spring relaxed binding maps these to
      # keySerializer / valueSerializer on the properties class.
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    consumer:
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      group-id: msg-group # consumer group id
      auto-offset-reset: earliest # start from earliest offset when no committed offset exists

3. 配置类

java
@Data
@Configuration
@ConfigurationProperties(prefix = "commons.kafka")
public class CommonsKafkaProperties {

    // Bound from commons.kafka.enabled; previously declared only in YAML with no
    // matching field, so it was silently ignored by the binder.
    private boolean enabled;

    // Comma-separated host:port pairs used to bootstrap the cluster connection.
    private String bootstrapServers;

    private Producer producer;
    private Consumer consumer;

    @Data
    public static class Producer {
        private String keySerializer;
        private String valueSerializer;
        // Extra producer settings passed through verbatim to the Kafka client.
        private Properties props = new Properties();
    }

    @Data
    public static class Consumer {
        private String keyDeserializer;
        private String valueDeserializer;
        private String autoOffsetReset;
        private String groupId;
        // Extra consumer settings passed through verbatim to the Kafka client.
        private Properties props = new Properties();
    }
}

4. kafka生产者消费者配置类

java
// @Configuration was missing: without it (or explicit registration) the @Bean
// methods below are never processed by Spring.
@Configuration
public class KafkaConfiguration {

    @Resource
    private CommonsKafkaProperties commonsKafkaProperties;

    /**
     * Shared String/String producer, created only when commons.kafka.enabled=true.
     */
    @Bean
    @ConditionalOnProperty(value = "commons.kafka.enabled", havingValue = "true")
    public KafkaProducer<String, String> kafkaProducer() {
        Properties props = new Properties();
        props.put("bootstrap.servers", commonsKafkaProperties.getBootstrapServers());
        props.put("key.serializer", commonsKafkaProperties.getProducer().getKeySerializer());
        props.put("value.serializer", commonsKafkaProperties.getProducer().getValueSerializer());
        // Applied last on purpose: user-supplied extra props override the defaults above.
        props.putAll(commonsKafkaProperties.getProducer().getProps());
        return new KafkaProducer<>(props);
    }

    /**
     * Shared String/String consumer, created only when commons.kafka.enabled=true.
     */
    @Bean
    @ConditionalOnProperty(value = "commons.kafka.enabled", havingValue = "true")
    public KafkaConsumer<String, String> kafkaConsumer() {
        Properties props = new Properties();
        props.put("bootstrap.servers", commonsKafkaProperties.getBootstrapServers());
        props.put("key.deserializer", commonsKafkaProperties.getConsumer().getKeyDeserializer());
        props.put("value.deserializer", commonsKafkaProperties.getConsumer().getValueDeserializer());
        props.put("group.id", commonsKafkaProperties.getConsumer().getGroupId());
        // Use the configured auto-offset-reset (previously hard-coded and the
        // property silently ignored); fall back to the old default "earliest".
        String autoOffsetReset = commonsKafkaProperties.getConsumer().getAutoOffsetReset();
        props.put("auto.offset.reset", autoOffsetReset != null ? autoOffsetReset : "earliest");
        // Applied last on purpose: user-supplied extra props override the defaults above.
        props.putAll(commonsKafkaProperties.getConsumer().getProps());
        return new KafkaConsumer<>(props);
    }
}

5. 发送消息

java
@Slf4j
@RestController
@RequestMapping("/kafkaMsg")
public class MessageController {

    @Resource
    private KafkaProducer<String, String> kafkaProducer;

    @GetMapping( "/send")
    public void sendMessage(@RequestParam String message) {
        ProducerRecord<String, String> record = new ProducerRecord<>("demoEvent", message);
        record.headers().add("event","TSP-W".getBytes());
        log.error("sendMessage demoEvent:{}", message);
        kafkaProducer.send(record);
    }
}

6. 消费者

6.1. 第一种方式

  • 发送消息时,设置了消息头部信息;消费时,可根据头部信息消费数据
java
@Slf4j
@Service
public class ApacheKafkaListener {

    @Autowired
    private KafkaConsumer<String, String> kafkaConsumer;

    @PostConstruct
    public void startListening() {
        // 订阅主题
        kafkaConsumer.subscribe(Collections.singletonList("demoEvent"));

        // 启动一个线程来处理消息
        new Thread(() -> {
            try {
                while (true) {
                    // 轮询消息
                    ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(100));
                    // 处理消息
                    records.forEach(this::handleMessage);
                }
            } catch (Exception e) {
                log.error("handle message error", e);
            } finally {
                kafkaConsumer.close();
            }
        }).start();
    }

    private void handleMessage(ConsumerRecord<String, String> record) {
        // 解析自定义头部
        String eventHeader = null;
        if (record.headers() != null) {
            eventHeader = new String(record.headers().lastHeader("event").value());
        }
        if (ObjectUtils.nullSafeEquals(eventHeader, "TSP-W")) {
            // 解析消息体
            String key = record.key();
            String value = record.value();
            // 处理逻辑
            log.error("Received message: key = {}, value = {}, eventHeader = {}", key, value, eventHeader);
        }
    }
}

6.2. 第二种方式

  • 将每个主题的消费者定义成独立的Bean,可增强代码的灵活性和可扩展性。这种做法不仅能针对各个主题进行精细化的配置,还能满足不同场景下的需求。
java
@Slf4j
@Configuration
public class ApacheKafkaConsumer {

    @Resource
    private CommonsKafkaProperties commonsKafkaProperties;

    /**
     * Dedicated consumer for the "lowLoadEvent" topic with its own tuning,
     * independent of the shared KafkaConsumer bean.
     *
     * NOTE(review): havingValue = "false" means this bean only exists when Kafka is
     * DISABLED, which looks inverted - presumably chosen to avoid a duplicate
     * KafkaConsumer<String,String> bean alongside KafkaConfiguration; confirm intent.
     */
    @Bean
    @ConditionalOnProperty(value = "commons.kafka.enabled", havingValue = "false")
    public KafkaConsumer<String, String> handle() {
        Properties props = new Properties();
        props.put("key.deserializer", commonsKafkaProperties.getConsumer().getKeyDeserializer());
        props.put("value.deserializer", commonsKafkaProperties.getConsumer().getValueDeserializer());
        props.put("bootstrap.servers", commonsKafkaProperties.getBootstrapServers());
        props.put("group.id", commonsKafkaProperties.getConsumer().getGroupId());
        // Large records (10 MiB per partition fetch) and slow handlers (10 min poll interval).
        props.put("max.partition.fetch.bytes", 10485760);
        props.put("max.poll.interval.ms", 600000);
        // NOTE(review): "io.kafka.*" keys are not standard Kafka client configs - the
        // client will warn about unknown settings; presumably read by an in-house
        // wrapper. TODO confirm they are actually consumed somewhere.
        props.put("io.kafka.record.handle.threadpool.coreSize", 5);
        props.put("io.kafka.record.handle.threadpool.maxSize", 10);
        props.put("io.kafka.record.handle.threadpool.queueSize", 5);
        props.put("io.kafka.client.name", "receiveDemoEvent");
        props.put("io.kafka.record.handle.threadpool.namePrefix", "DemoEvent");
        // Applied last on purpose: user-supplied extra props override the defaults above.
        props.putAll(commonsKafkaProperties.getConsumer().getProps());
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList("lowLoadEvent"));
        // KafkaConsumer is not thread-safe: after this point it must only be used
        // from the polling thread below, even though the bean is also returned.
        new Thread(() -> {
            while (true) {
                try {
                    ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                    for (ConsumerRecord<String, String> record : records) {
                        // Normal consumption - info level, not error.
                        log.info("消费到消息: {}", record.value());
                    }
                } catch (Exception e) {
                    log.error("Error while consuming messages", e);
                    // Back off briefly so a persistent failure does not busy-spin the CPU
                    // while flooding the log.
                    try {
                        Thread.sleep(1000L);
                    } catch (InterruptedException ie) {
                        Thread.currentThread().interrupt();
                        break;
                    }
                }
            }
        }, "lowLoadEvent-consumer").start();
        return consumer;
    }
}
相关推荐
予枫的编程笔记3 小时前
【Java进阶2】Java常用消息中间件深度解析:特性、架构与适用场景
java·kafka·rabbitmq·rocketmq·activemq
Apache IoTDB3 小时前
Apache IoTDB 社区荣获开放原子开发者大会多项殊荣丨「开源、产学研用融合」双认可
开源·开放原子·apache·iotdb
彭于晏Yan3 小时前
Spring集成kafka
spring·kafka
学博成4 小时前
在 Spring Boot 中使用 Kafka 并保证顺序性(Topic 分区为 100)的完整案例
spring boot·kafka
SelectDB14 小时前
5 倍性能提升,Apache Doris TopN 全局优化详解|Deep Dive
数据库·apache
zhougl99616 小时前
Vuex 模块命名冲突:问题解析与完整解决方案
linux·服务器·apache
I · T · LUCKYBOOM18 小时前
1.Apache网站优化
linux·运维·服务器·网络·apache
一人の梅雨1 天前
微店商品详情接口深度解析:从多端适配到全链路数据补全
apache
yumgpkpm1 天前
Cloudera CDP 7.3(国产CMP 鲲鹏版)平台与银行五大平台的技术对接方案
大数据·人工智能·hive·zookeeper·flink·kafka·cloudera