## Overview

This sample code demonstrates how to use Kafka: the Spring Boot Kafka and Spring Cloud Stream Kafka components are each used to show sending and receiving messages.
## SpringBootKafka
### pom

Only spring-kafka needs to be declared in the pom; no version number is required, because spring-boot-dependencies resolves a suitable version automatically.

```xml
<dependencyManagement>
    <dependencies>
        <!-- Spring Boot version -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-dependencies</artifactId>
            <version>${spring-boot.version}</version>
            <type>pom</type>
            <scope>import</scope>
        </dependency>
    </dependencies>
</dependencyManagement>
<dependencies>
    <!-- kafka -->
    <dependency>
        <groupId>org.springframework.kafka</groupId>
        <artifactId>spring-kafka</artifactId>
    </dependency>
</dependencies>
```
### yml

```yaml
spring:
  application:
    name: boot-kafka
  # Spring Boot Kafka configuration
  kafka:
    bootstrap-servers: 192.168.1.145:9092
    client-id: boot-kafka-client
```
### Sending messages

Messages are sent through a KafkaTemplate.

```java
@Slf4j
@RequiredArgsConstructor
@RestController
@RequestMapping("/test-kafka")
public class TestKafkaController {

    private final KafkaTemplate kafkaTemplate;

    @GetMapping("/test-send")
    public String testSend() {
        String msg = UUID.randomUUID() + " test message content, String type";
        ListenableFuture future = kafkaTemplate.send(TEST_TOPIC, msg);
        // Add callbacks for both success and failure
        future.addCallback(result -> {
                    System.out.println("Success: " + result);
                },
                throwable -> {
                    System.err.println("Failure: " + throwable.getMessage());
                });
        return "Message sent, msg=" + msg;
    }

    @GetMapping("/test-send2")
    public String testSend2() {
        String msg = UUID.randomUUID() + " test message content, String type";
        // Overload that passes a message key ("beijing-partition") in addition to the topic
        ListenableFuture future = kafkaTemplate.send(TEST_TOPIC, "beijing-partition", msg);
        // Add callbacks for both success and failure
        future.addCallback(result -> {
                    System.out.println("Success: " + result);
                },
                throwable -> {
                    System.err.println("Failure: " + throwable.getMessage());
                });
        return "Message sent, msg=" + msg;
    }

    @GetMapping("/test-send3")
    public String testSend3() {
        String msg = UUID.randomUUID() + " test message content, String type";
        // Build a Message manually, carrying the target topic in its headers
        Message message = new Message() {
            @Override
            public Object getPayload() {
                return msg;
            }

            @Override
            public MessageHeaders getHeaders() {
                return new MessageHeaders(new HashMap<>() {{
                    put(MessageHeaders.ID, UUID.randomUUID().toString());
                    put(MessageHeaders.TIMESTAMP, System.currentTimeMillis());
                    put(KafkaHeaders.TOPIC, TEST_TOPIC);
                }});
            }
        };
        ListenableFuture future = kafkaTemplate.send(message);
        // Add callbacks for both success and failure
        future.addCallback(result -> {
                    System.out.println("Success: " + result);
                },
                throwable -> {
                    System.err.println("Failure: " + throwable.getMessage());
                });
        return "Message sent, msg=" + msg;
    }

    @GetMapping("/test-send-uavState")
    public String testSendUavState() {
        String msg = UUID.randomUUID() + " test message content, String type";
        // Send a byte[] payload
        ListenableFuture future = kafkaTemplate.send(UAV_STATE_TOPIC, msg.getBytes(StandardCharsets.UTF_8));
        // Add callbacks for both success and failure
        future.addCallback(result -> {
                    System.out.println("Success: " + result);
                },
                throwable -> {
                    System.err.println("Failure: " + throwable.getMessage());
                });
        return "Message sent, msg=" + msg;
    }
}
```
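The TEST_TOPIC and UAV_STATE_TOPIC constants referenced above are not shown in the sample, and the topics must already exist (or broker-side auto-creation must be enabled) before sending. Below is a minimal sketch of declaring a topic as a bean; the name `test-topic` and the partition/replica counts are placeholders, not values taken from the sample. Spring Boot's auto-configured KafkaAdmin creates NewTopic beans on startup.

```java
import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.TopicBuilder;

@Configuration
public class KafkaTopicConfig {

    @Bean
    public NewTopic testTopic() {
        // "test-topic", 3 partitions and 1 replica are illustrative values only,
        // not taken from the sample project.
        return TopicBuilder.name("test-topic")
                .partitions(3)
                .replicas(1)
                .build();
    }
}
```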
### Receiving messages

Messages are received by methods annotated with @KafkaListener.

```java
@Slf4j
@RequiredArgsConstructor
@Component
public class TestKafkaListener {

    @KafkaListener(topics = TEST_TOPIC, groupId = "group-id-666")
    public void receiveMsg(Message<String> message) {
        System.out.println("Message received, headers:");
        MessageHeaders headers = message.getHeaders();
        headers.forEach((k, v) -> {
            System.out.println(k + ":" + v);
        });
        String payload = message.getPayload();
        System.out.println("Payload:\n" + payload);
    }
}
```
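If partition or offset details are needed without the Message wrapper, a listener method can also take the raw ConsumerRecord. A minimal sketch, assuming the same static TEST_TOPIC constant import as in the sample; the group id here is made up, and because it differs from "group-id-666" this listener gets its own copy of each message.

```java
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

@Slf4j
@Component
public class TestKafkaRecordListener {

    // groupId "group-id-record" is hypothetical, purely for illustration
    @KafkaListener(topics = TEST_TOPIC, groupId = "group-id-record")
    public void receiveRecord(ConsumerRecord<String, String> record) {
        log.info("topic={}, partition={}, offset={}, key={}, value={}",
                record.topic(), record.partition(), record.offset(), record.key(), record.value());
    }
}
```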
## SpringCloudStreamKafka
Spring Cloud Stream takes a more abstract approach, and Stream 2.x and Stream 3.x are used differently.
This example is based on Stream 3.2.9: messages are sent with StreamBridge and received with a functional bean.
### pom

Only spring-cloud-stream-binder-kafka needs to be added. If the MQ middleware is later switched to Rabbit, it only has to be replaced with spring-cloud-stream-binder-rabbit.

```xml
<dependencyManagement>
    <dependencies>
        <!-- Spring Cloud microservices -->
        <dependency>
            <groupId>org.springframework.cloud</groupId>
            <artifactId>spring-cloud-dependencies</artifactId>
            <version>${spring-cloud.version}</version>
            <type>pom</type>
            <scope>import</scope>
        </dependency>
    </dependencies>
</dependencyManagement>
<dependencies>
    <!-- stream kafka -->
    <dependency>
        <groupId>org.springframework.cloud</groupId>
        <artifactId>spring-cloud-stream-binder-kafka</artifactId>
    </dependency>
</dependencies>
```
### yml

```yaml
spring:
  application:
    name: boot-kafka
  # Spring Cloud Stream configuration
  cloud:
    stream:
      kafka:
        binder:
          brokers: 192.168.1.145:9092

      # Binding definitions
      bindings:
        # Outbound binding, i.e. the message producer
        uavState-out-0:
          # Topic name; corresponds to the UAV_STATE_TOPIC constant in the code
          destination: uav-state
        # Inbound binding, i.e. the message consumer
        uavState-in-0:
          # Topic name
          destination: uav-state
          # group-id
          group: comsumer-service-part-beijing
```
### Sending messages

```java
@Slf4j
@RequiredArgsConstructor
@RestController
@RequestMapping("/default")
public class DefaultController {

    private final DynamicKafkaProducer dynamicKafkaProducer;

    @GetMapping("/testDynamicTopic")
    public String testDynamicTopic() {
        String msg = UUID.randomUUID() + " test message content";
        dynamicKafkaProducer.sendToDynamicTopic(UAV_STATE_TOPIC, msg);
        return "Message sent. " + msg;
    }
}
```
At its core, the message is sent through a StreamBridge object.

```java
@Slf4j
@RequiredArgsConstructor
@Component
public class DynamicKafkaProducer {

    private final StreamBridge streamBridge;

    public void sendToDynamicTopic(String topic, Object message) {
        // Dynamically build the binding name: {topic}-out-0
        String bindingName = topic + "-out-0";
        streamBridge.send(bindingName, message);
        log.info("Message sent, bindingName={}", bindingName);
    }
}
```
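StreamBridge also accepts a full Message, so custom headers can be attached when needed. A minimal sketch that reuses the same {topic}-out-0 naming convention; the "source" header name and its value are made up for illustration.

```java
import org.springframework.cloud.stream.function.StreamBridge;
import org.springframework.messaging.Message;
import org.springframework.messaging.support.MessageBuilder;
import org.springframework.stereotype.Component;

@Component
public class DynamicKafkaHeaderProducer {

    private final StreamBridge streamBridge;

    public DynamicKafkaHeaderProducer(StreamBridge streamBridge) {
        this.streamBridge = streamBridge;
    }

    public void sendWithHeader(String topic, Object payload) {
        Message<Object> message = MessageBuilder.withPayload(payload)
                // "source" is a hypothetical custom header, purely for illustration
                .setHeader("source", "boot-kafka")
                .build();
        streamBridge.send(topic + "-out-0", message);
    }
}
```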
### Receiving messages

```java
@Slf4j
@Configuration
public class FunctionalMessageConsumer {

    /**
     * Consume a single message without replying.
     * The bean name 'uavState' corresponds to uavState-in-0 in application.yml.
     *
     * <p></p>
     * If a reply is needed, use a Function instead, for example:
     *  @Bean
     *  public Function<Order, OrderResult> orderProcessor() {
     *      return order -> {
     *          System.out.println("Processing order: " + order.getOrderNo());
     *
     *          // business logic
     *          OrderResult result = new OrderResult();
     *          result.setOrderNo(order.getOrderNo());
     *          result.setStatus("SUCCESS");
     *          result.setProcessTime(System.currentTimeMillis());
     *
     *          return result;
     *      };
     *  }
     */
    @Bean
    public Consumer<Message<String>> uavState() {
        return message -> {
            log.info("Received via Function, payload: {}", message.getPayload());
            // Received via Function, headers:
            // {deliveryAttempt=1, kafka_timestampType=CREATE_TIME,
            // kafka_receivedTopic=uav-state, kafka_offset=4,
            // scst_nativeHeadersPresent=true, kafka_consumer=org.apache.kafka.clients.consumer.KafkaConsumer@308bacf9,
            // source-type=kafka, id=80df005a-eac2-f2d9-a6e8-ce27a396968f, kafka_receivedPartitionId=0,
            // contentType=application/json, kafka_receivedTimestamp=1762238548403,
            // kafka_groupId=comsumer-service-part-beijing, timestamp=1762238548577}
            log.info("Received via Function, headers: {}", message.getHeaders());
            // message handling logic...
            // To access headers and other metadata, take Message<String> as the parameter type
        };
    }
}
```
## Notes

- The two components can be used together; at the pom level, spring-cloud-stream-binder-kafka itself already depends on spring-kafka.
- Messages sent through stream-kafka can also be received with @KafkaListener, as sketched below.
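A minimal sketch of the second point: a plain Spring Kafka listener on the uav-state topic also receives what the stream binder publishes there. The group id below is hypothetical; any group other than the stream consumer's group gets its own copy of the messages.

```java
import lombok.extern.slf4j.Slf4j;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

@Slf4j
@Component
public class UavStateKafkaListener {

    // "boot-kafka-extra-group" is a made-up group id, purely for illustration
    @KafkaListener(topics = "uav-state", groupId = "boot-kafka-extra-group")
    public void onUavState(String payload) {
        log.info("Received via @KafkaListener: {}", payload);
    }
}
```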