Apache Kafka使用

1. 引入依赖

xml
 <dependency>
       <groupId>org.apache.kafka</groupId>
       <artifactId>kafka-clients</artifactId>
       <version>3.0.1</version>
   </dependency>

2. yaml配置信息

yaml
commons:
  kafka:
    enabled: true   #是否开启kafka
    bootstrapServers: 127.0.0.1:9092
    producer:
      keySerializer: org.apache.kafka.common.serialization.StringSerializer
      valueSerializer: org.apache.kafka.common.serialization.StringSerializer
    consumer:
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      group-id: msg-group # 消费者组
      auto-offset-reset: earliest

3. 配置类

java
@Data
@Configuration
@ConfigurationProperties(prefix = "commons.kafka")
public class CommonsKafkaProperties {

    // Kafka broker address list, e.g. "127.0.0.1:9092".
    private String bootstrapServers;
    // Producer-side settings bound from commons.kafka.producer.*
    private Producer producer;
    // Consumer-side settings bound from commons.kafka.consumer.*
    private Consumer consumer;

    /** Producer configuration: serializer class names plus pass-through props. */
    @Data
    public static class Producer {
        private String keySerializer;
        private String valueSerializer;
        // Extra producer properties (commons.kafka.producer.props.*) applied verbatim.
        private Properties props = new Properties();
    }

    /** Consumer configuration: deserializers, offset policy, group id, pass-through props. */
    @Data
    public static class Consumer {
        private String keyDeserializer;
        private String valueDeserializer;
        // Offset reset policy: "earliest", "latest", or "none".
        private String autoOffsetReset;
        private String groupId;
        // Extra consumer properties (commons.kafka.consumer.props.*) applied verbatim.
        private Properties props = new Properties();
    }
}

4. kafka生产者消费者配置类

java
@Configuration // Fix: @Bean methods are only processed when the class is a Spring configuration.
public class KafkaConfiguration {

    @Resource
    private CommonsKafkaProperties commonsKafkaProperties;

    /**
     * Kafka producer bean, created only when commons.kafka.enabled=true.
     * Serializer classes and the broker list come from CommonsKafkaProperties;
     * any extra entries in producer.props are passed through unchanged (and may
     * override the explicit keys above, since putAll runs last).
     */
    @Bean
    @ConditionalOnProperty(value = "commons.kafka.enabled", havingValue = "true")
    public KafkaProducer<String, String> kafkaProducer() {
        Properties props = new Properties();
        props.put("bootstrap.servers", commonsKafkaProperties.getBootstrapServers());
        props.put("key.serializer", commonsKafkaProperties.getProducer().getKeySerializer());
        props.put("value.serializer", commonsKafkaProperties.getProducer().getValueSerializer());
        props.putAll(commonsKafkaProperties.getProducer().getProps());
        return new KafkaProducer<>(props);
    }

    /**
     * Kafka consumer bean, created only when commons.kafka.enabled=true.
     * Fix: auto.offset.reset now honors the configured consumer.autoOffsetReset
     * value instead of a hard-coded "earliest" (the yaml already sets it).
     */
    @Bean
    @ConditionalOnProperty(value = "commons.kafka.enabled", havingValue = "true")
    public KafkaConsumer<String, String> kafkaConsumer() {
        Properties props = new Properties();
        props.put("bootstrap.servers", commonsKafkaProperties.getBootstrapServers());
        props.put("key.deserializer", commonsKafkaProperties.getConsumer().getKeyDeserializer());
        props.put("value.deserializer", commonsKafkaProperties.getConsumer().getValueDeserializer());
        props.put("group.id", commonsKafkaProperties.getConsumer().getGroupId());
        props.put("auto.offset.reset", commonsKafkaProperties.getConsumer().getAutoOffsetReset());
        props.putAll(commonsKafkaProperties.getConsumer().getProps());
        return new KafkaConsumer<>(props);
    }
}

5. 发送消息

java
@Slf4j
@RestController
@RequestMapping("/kafkaMsg")
public class MessageController {

    @Resource
    private KafkaProducer<String, String> kafkaProducer;

    @GetMapping( "/send")
    public void sendMessage(@RequestParam String message) {
        ProducerRecord<String, String> record = new ProducerRecord<>("demoEvent", message);
        record.headers().add("event","TSP-W".getBytes());
        log.error("sendMessage demoEvent:{}", message);
        kafkaProducer.send(record);
    }
}

6. 消费者

6.1. 第一种方式

  • 发送消息时,设置了消息头部信息;消费时,可根据头部信息消费数据
java
@Slf4j
@Service
public class ApacheKafkaListener {

    @Autowired
    private KafkaConsumer<String, String> kafkaConsumer;

    // Signals the polling thread to stop; volatile for cross-thread visibility.
    private volatile boolean running = true;

    /**
     * Subscribes to "demoEvent" and starts a single polling thread.
     * KafkaConsumer is not thread-safe, so every call after subscribe()
     * happens on that one thread only.
     */
    @PostConstruct
    public void startListening() {
        kafkaConsumer.subscribe(Collections.singletonList("demoEvent"));

        Thread poller = new Thread(() -> {
            try {
                // Fix: bounded by the running flag instead of while(true).
                while (running) {
                    ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(100));
                    records.forEach(this::handleMessage);
                }
            } catch (Exception e) {
                log.error("handle message error", e);
            } finally {
                kafkaConsumer.close();
            }
        });
        // Fix: daemon thread so a forgotten loop cannot block JVM shutdown.
        poller.setDaemon(true);
        poller.start();
    }

    /**
     * Processes one record: reads the "event" header and handles only
     * messages tagged "TSP-W".
     */
    private void handleMessage(ConsumerRecord<String, String> record) {
        // Fix: lastHeader() returns null when the header is absent; the original
        // dereferenced it unconditionally and would NPE on untagged messages.
        String eventHeader = null;
        if (record.headers() != null && record.headers().lastHeader("event") != null) {
            eventHeader = new String(record.headers().lastHeader("event").value());
        }
        if (ObjectUtils.nullSafeEquals(eventHeader, "TSP-W")) {
            String key = record.key();
            String value = record.value();
            // Fix: informational message logged at info instead of error.
            log.info("Received message: key = {}, value = {}, eventHeader = {}", key, value, eventHeader);
        }
    }
}

6.2. 第二种方式

  • 将每个主题的消费者定义成独立的Bean,可增强代码的灵活性和可扩展性。这种做法不仅能针对各个主题进行精细化的配置,还能满足不同场景下的需求。
java
@Slf4j
@Configuration
public class ApacheKafkaConsumer {

    @Resource
    private CommonsKafkaProperties commonsKafkaProperties;

    /**
     * Dedicated consumer bean for the "lowLoadEvent" topic with its own
     * fetch/poll tuning, plus a polling thread started as a side effect.
     *
     * NOTE(review): havingValue = "false" means this bean only exists when
     * commons.kafka.enabled=false — that looks inverted relative to the other
     * beans in this article; confirm whether it is a deliberate either/or
     * switch or a typo before flipping it.
     */
    @Bean
    @ConditionalOnProperty(value = "commons.kafka.enabled", havingValue = "false")
    public KafkaConsumer<String, String> handle() {
        Properties props = new Properties();
        props.put("key.deserializer", commonsKafkaProperties.getConsumer().getKeyDeserializer());
        props.put("value.deserializer", commonsKafkaProperties.getConsumer().getValueDeserializer());
        props.put("bootstrap.servers", commonsKafkaProperties.getBootstrapServers());
        props.put("group.id", commonsKafkaProperties.getConsumer().getGroupId());
        props.put("max.partition.fetch.bytes", 10485760); // 10 MiB per partition per fetch
        props.put("max.poll.interval.ms", 600000);        // 10 min allowed between polls
        // NOTE(review): the io.kafka.* keys below are not Kafka client configs;
        // the client will ignore them (logging "unused" warnings) unless a custom
        // interceptor/handler in this project reads them — verify.
        props.put("io.kafka.record.handle.threadpool.coreSize", 5);
        props.put("io.kafka.record.handle.threadpool.maxSize", 10);
        props.put("io.kafka.record.handle.threadpool.queueSize", 5);
        props.put("io.kafka.client.name", "receiveDemoEvent");
        props.put("io.kafka.record.handle.threadpool.namePrefix", "DemoEvent");
        props.putAll(commonsKafkaProperties.getConsumer().getProps());

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList("lowLoadEvent"));

        // Single polling thread; catch inside the loop so one bad poll/record
        // batch does not kill consumption.
        Thread poller = new Thread(() -> {
            while (true) {
                try {
                    ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                    for (ConsumerRecord<String, String> record : records) {
                        // Fix: normal consumption logged at info instead of error.
                        log.info("消费到消息: {}", record.value());
                    }
                } catch (Exception e) {
                    log.error("Error while consuming messages", e);
                }
            }
        });
        // Fix: daemon thread so the endless loop cannot block JVM shutdown.
        poller.setDaemon(true);
        poller.start();
        return consumer;
    }
}
相关推荐
DemonAvenger3 天前
Kafka性能调优:从参数配置到硬件选择的全方位指南
性能优化·kafka·消息队列
yumgpkpm5 天前
AI视频生成:Wan 2.2(阿里通义万相)在华为昇腾下的部署?
人工智能·hadoop·elasticsearch·zookeeper·flink·kafka·cloudera
予枫的编程笔记5 天前
【Kafka高级篇】避开Kafka原生重试坑,Java业务端自建DLQ体系,让消息不丢失、不积压
java·kafka·死信队列·消息中间件·消息重试·dlq·java业务开发
倚肆5 天前
在 Windows Docker 中安装 Kafka 并映射 Windows 端口
docker·kafka
james的分享5 天前
大数据领域核心 SQL 优化框架Apache Calcite介绍
大数据·sql·apache·calcite
莫寒清5 天前
Apache Tika
java·人工智能·spring·apache·知识图谱
Sheffield5 天前
如果把ZooKeeper按字面意思比作动物园管理员……
elasticsearch·zookeeper·kafka
归叶再无青5 天前
web服务安装部署、性能升级等(Apache、Nginx)
运维·前端·nginx·云原生·apache·bash
雪碧聊技术5 天前
kafka的下载、安装、启动
kafka