Apache Kafka使用

1. 引入依赖

xml
 <dependency>
       <groupId>org.apache.kafka</groupId>
       <artifactId>kafka-clients</artifactId>
       <version>3.0.1</version>
   </dependency>

2. yaml配置信息

yaml
commons:
  kafka:
    enabled: true   # whether to enable the Kafka integration (gates the @ConditionalOnProperty beans)
    bootstrapServers: 127.0.0.1:9092   # comma-separated broker address list
    producer:
      # fully-qualified (de)serializer class names passed straight to the Kafka client
      keySerializer: org.apache.kafka.common.serialization.StringSerializer
      valueSerializer: org.apache.kafka.common.serialization.StringSerializer
    consumer:
      # NOTE(review): producer keys are camelCase, consumer keys kebab-case — both bind
      # via Spring relaxed binding, but one convention would be cleaner.
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      group-id: msg-group # consumer group id
      auto-offset-reset: earliest # where to start when no committed offset exists

3. 配置类

java
@Data
@Configuration
@ConfigurationProperties(prefix = "commons.kafka")
public class CommonsKafkaProperties {

    /** Whether the Kafka integration is enabled; bound from {@code commons.kafka.enabled}. */
    private Boolean enabled;

    /** Comma-separated broker address list, e.g. {@code host1:9092,host2:9092}. */
    private String bootstrapServers;

    // Default-initialized so callers (e.g. KafkaConfiguration) don't NPE when the
    // producer/consumer section is omitted from the YAML.
    private Producer producer = new Producer();
    private Consumer consumer = new Consumer();

    /** Producer-side settings; {@code props} holds any extra raw Kafka client properties. */
    @Data
    public static class Producer {
        private String keySerializer;
        private String valueSerializer;
        private Properties props = new Properties();
    }

    /** Consumer-side settings; {@code props} holds any extra raw Kafka client properties. */
    @Data
    public static class Consumer {
        private String keyDeserializer;
        private String valueDeserializer;
        private String autoOffsetReset;
        private String groupId;
        private Properties props = new Properties();
    }
}

4. kafka生产者消费者配置类

java
/**
 * Builds raw Kafka client beans from {@link CommonsKafkaProperties}.
 * Both beans are only registered when {@code commons.kafka.enabled=true}.
 */
@Configuration // was missing — without it the @Bean methods are never processed
public class KafkaConfiguration {

    @Resource
    private CommonsKafkaProperties commonsKafkaProperties;

    /** Shared producer; KafkaProducer is thread-safe so a singleton bean is fine. */
    @Bean
    @ConditionalOnProperty(value = "commons.kafka.enabled", havingValue = "true")
    public KafkaProducer<String, String> kafkaProducer() {
        Properties props = new Properties();
        props.put("bootstrap.servers", commonsKafkaProperties.getBootstrapServers());
        props.put("key.serializer", commonsKafkaProperties.getProducer().getKeySerializer());
        props.put("value.serializer", commonsKafkaProperties.getProducer().getValueSerializer());
        // Extra raw client properties win over the explicit ones above.
        props.putAll(commonsKafkaProperties.getProducer().getProps());
        return new KafkaProducer<>(props);
    }

    /**
     * Shared consumer. NOTE: KafkaConsumer is NOT thread-safe — all access must come
     * from a single thread (see ApacheKafkaListener's dedicated poll thread).
     */
    @Bean
    @ConditionalOnProperty(value = "commons.kafka.enabled", havingValue = "true")
    public KafkaConsumer<String, String> kafkaConsumer() {
        Properties props = new Properties();
        props.put("bootstrap.servers", commonsKafkaProperties.getBootstrapServers());
        props.put("key.deserializer", commonsKafkaProperties.getConsumer().getKeyDeserializer());
        props.put("value.deserializer", commonsKafkaProperties.getConsumer().getValueDeserializer());
        props.put("group.id", commonsKafkaProperties.getConsumer().getGroupId());
        // Honor the configured auto-offset-reset instead of hard-coding "earliest";
        // fall back to the previous default when the property is unset.
        String autoOffsetReset = commonsKafkaProperties.getConsumer().getAutoOffsetReset();
        props.put("auto.offset.reset", autoOffsetReset != null ? autoOffsetReset : "earliest");
        props.putAll(commonsKafkaProperties.getConsumer().getProps());
        return new KafkaConsumer<>(props);
    }
}

5. 发送消息

java
@Slf4j
@RestController
@RequestMapping("/kafkaMsg")
public class MessageController {

    @Resource
    private KafkaProducer<String, String> kafkaProducer;

    @GetMapping( "/send")
    public void sendMessage(@RequestParam String message) {
        ProducerRecord<String, String> record = new ProducerRecord<>("demoEvent", message);
        record.headers().add("event","TSP-W".getBytes());
        log.error("sendMessage demoEvent:{}", message);
        kafkaProducer.send(record);
    }
}

6. 消费者

6.1. 第一种方式

  • 发送消息时,设置了消息头部信息;消费时,可根据头部信息消费数据
java
/**
 * Polls the {@code demoEvent} topic on a dedicated thread (KafkaConsumer is not
 * thread-safe) and dispatches records whose "event" header equals "TSP-W".
 */
@Slf4j
@Service
public class ApacheKafkaListener {

    @Autowired
    private KafkaConsumer<String, String> kafkaConsumer;

    // Shutdown flag for the poll loop; volatile so the poll thread sees the update.
    private volatile boolean running = true;

    @PostConstruct
    public void startListening() {
        // Subscribe to the topic before polling.
        kafkaConsumer.subscribe(Collections.singletonList("demoEvent"));

        // Dedicated poll thread — all consumer access stays on this thread.
        Thread poller = new Thread(() -> {
            try {
                while (running) {
                    ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(100));
                    for (ConsumerRecord<String, String> record : records) {
                        try {
                            handleMessage(record);
                        } catch (Exception e) {
                            // Catch per-record so one bad message no longer kills the
                            // whole loop (previously any exception ended consumption).
                            log.error("handle message error", e);
                        }
                    }
                }
            } catch (WakeupException e) {
                // Expected: thrown by wakeup() during shutdown to interrupt poll().
            } catch (Exception e) {
                log.error("handle message error", e);
            } finally {
                kafkaConsumer.close();
            }
        }, "demoEvent-consumer");
        poller.setDaemon(true); // don't block JVM exit
        poller.start();
    }

    /** Stops the poll loop and unblocks a poll() in progress. */
    @PreDestroy
    public void stopListening() {
        running = false;
        kafkaConsumer.wakeup();
    }

    private void handleMessage(ConsumerRecord<String, String> record) {
        // Read the custom "event" header; lastHeader() returns null when absent,
        // which previously caused an NPE on .value().
        String eventHeader = null;
        if (record.headers() != null) {
            Header header = record.headers().lastHeader("event");
            if (header != null && header.value() != null) {
                eventHeader = new String(header.value(), StandardCharsets.UTF_8);
            }
        }
        if (ObjectUtils.nullSafeEquals(eventHeader, "TSP-W")) {
            String key = record.key();
            String value = record.value();
            // Normal receipt is info-level, not error.
            log.info("Received message: key = {}, value = {}, eventHeader = {}", key, value, eventHeader);
        }
    }
}

6.2. 第二种方式

  • 将每个主题的消费者定义成独立的Bean,可增强代码的灵活性和可扩展性。这种做法不仅能针对各个主题进行精细化的配置,还能满足不同场景下的需求。
java
/**
 * Alternative pattern: one dedicated KafkaConsumer bean per topic, each with its
 * own tuning, subscribed to {@code lowLoadEvent}.
 */
@Slf4j
@Configuration
public class ApacheKafkaConsumer {

    @Resource
    private CommonsKafkaProperties commonsKafkaProperties;

    // NOTE(review): havingValue = "false" means this consumer only exists when Kafka
    // is DISABLED — that looks inverted; confirm the intended condition.
    @Bean
    @ConditionalOnProperty(value = "commons.kafka.enabled", havingValue = "false")
    public KafkaConsumer<String, String> handle() {
        Properties props = new Properties();
        props.put("key.deserializer", commonsKafkaProperties.getConsumer().getKeyDeserializer());
        props.put("value.deserializer", commonsKafkaProperties.getConsumer().getValueDeserializer());
        props.put("bootstrap.servers", commonsKafkaProperties.getBootstrapServers());
        props.put("group.id", commonsKafkaProperties.getConsumer().getGroupId());
        // Per-topic tuning: larger fetches and a longer poll interval for slow handlers.
        props.put("max.partition.fetch.bytes", 10485760);
        props.put("max.poll.interval.ms", 600000);
        // NOTE(review): the "io.kafka.*" keys below are not Kafka client configs — the
        // client will log them as unknown. Presumably consumed by an in-house wrapper;
        // verify, otherwise drop them.
        props.put("io.kafka.record.handle.threadpool.coreSize", 5);
        props.put("io.kafka.record.handle.threadpool.maxSize", 10);
        props.put("io.kafka.record.handle.threadpool.queueSize", 5);
        props.put("io.kafka.client.name", "receiveDemoEvent");
        props.put("io.kafka.record.handle.threadpool.namePrefix", "DemoEvent");
        // Extra raw client properties win over the explicit ones above.
        props.putAll(commonsKafkaProperties.getConsumer().getProps());

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList("lowLoadEvent"));

        // Dedicated poll thread; daemon + named so it no longer blocks JVM shutdown.
        Thread poller = new Thread(() -> {
            while (true) {
                try {
                    ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                    for (ConsumerRecord<String, String> record : records) {
                        // Normal receipt is info-level, not error.
                        log.info("消费到消息: {}", record.value());
                    }
                } catch (Exception e) {
                    // Swallow-and-continue keeps the loop alive across transient failures.
                    log.error("Error while consuming messages", e);
                }
            }
        }, "lowLoadEvent-consumer");
        poller.setDaemon(true);
        poller.start();
        return consumer;
    }
}
相关推荐
空中海11 分钟前
Kafka Streams、Connect 与生态
分布式·kafka·linq
Knight_AL2 天前
从 0 到 1:PG WAL → Debezium → Kafka → Spring Boot → Redis
spring boot·redis·kafka
无籽西瓜a2 天前
【西瓜带你学Kafka | 第六期】Kafka 生产确认、消费 API 与分区分配策略(文含图解)
java·分布式·后端·kafka·消息队列·mq
无籽西瓜a2 天前
【西瓜带你学Kafka | 第七期】Kafka 日志存储体系:保留清理、消息格式与分段刷新策略(文含图解)
java·分布式·后端·kafka·消息队列·mq
冷小鱼3 天前
消息队列(MQ)技术全景科普:从选型到AI+未来
人工智能·kafka·rabbitmq·rocketmq·mq·pulsar
DolphinScheduler社区3 天前
DolphinScheduler 3.3.2 如何调用 DataX 3.0 + SeaTunnel 2.3.12?附 Demo演示!
java·spark·apache·海豚调度·大数据工作流调度
YaBingSec3 天前
玄机网络安全靶场:Apache HTTPD 解析漏洞(CVE-2017-15715)WP
java·笔记·安全·web安全·php·apache
运维老司机3 天前
Kafka 单节点部署(Docker Compose + 数据持久化)
分布式·docker·kafka
JAVA面经实录9173 天前
如何选择适合项目的「限流 / 熔断 / 降级」方案
java·spring·kafka·sentinel·guava
ezreal_pan4 天前
Kafka Docker 部署持久化避坑指南:解决重启后 Cluster ID 不匹配问题
分布式·docker·zookeeper·容器·kafka·devops