Integrating Kafka with Spring

1. Add the dependency

xml
 <dependency>
     <groupId>org.springframework.kafka</groupId>
     <artifactId>spring-kafka</artifactId>
     <version>2.8.4</version>
 </dependency>

2. YAML configuration

yaml
spring:
  kafka:
    bootstrap-servers: 127.0.0.1:9092
    producer:
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
      retries: 3  # number of send retries
      batch-size: 16384 # batch size (bytes)
      buffer-memory: 33554432 # producer buffer memory (bytes)
      #acks: all # strongest delivery guarantee
      #linger-ms: 50 # wait time before a batch is sent
    consumer:
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      group-id: order-service-group # consumer group
      auto-offset-reset: latest # where a new consumer starts reading: "earliest" (from the oldest messages) or "latest" (from the newest messages)
      enable-auto-commit: false # whether offsets are committed automatically; if true the consumer commits offsets itself, otherwise they must be committed manually
      # offset commit delay (how long after receiving a message the offset is committed)
      #auto-commit-interval: 1000
      #properties:
        #session.timeout.ms: 60000
    # listener settings
    listener:
      ack-mode: manual_immediate # manual acknowledgment
      concurrency: 3 # number of concurrent consumers
      #missing-topics-fatal: false
      #idle-event-interval: 60000  # idle event interval
      #log-container-config: false
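
The topics used in the examples below ("highLoadEvent", "highLoadEvent1", "highLoadEvent2") must already exist on the broker. If automatic topic creation is disabled there, one option (not part of the original setup; partition and replica counts below are illustrative) is to declare them as NewTopic beans, which Spring Boot's auto-configured KafkaAdmin creates at startup:

java
import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.TopicBuilder;

@Configuration
public class KafkaTopicConfiguration {

    // NewTopic beans are picked up by KafkaAdmin and created if they are missing
    @Bean
    public NewTopic highLoadEventTopic() {
        return TopicBuilder.name("highLoadEvent").partitions(3).replicas(1).build();
    }

    @Bean
    public NewTopic highLoadEvent1Topic() {
        return TopicBuilder.name("highLoadEvent1").partitions(3).replicas(1).build();
    }

    @Bean
    public NewTopic highLoadEvent2Topic() {
        return TopicBuilder.name("highLoadEvent2").partitions(3).replicas(1).build();
    }
}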

3. Configuration class

java
@Configuration
@EnableConfigurationProperties({KafkaProperties.class})
@EnableKafka
@AllArgsConstructor
public class KafkaConfiguration {


	private final KafkaProperties kafkaProperties;

	@Bean
	public KafkaTemplate<String, String> kafkaTemplate() {
		return new KafkaTemplate<>(producerFactory());
	}

	@Bean
	public ProducerFactory<String, String> producerFactory() {
		return new DefaultKafkaProducerFactory<>(kafkaProperties.buildProducerProperties());
	}

	@Bean
	public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
		ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
		factory.setConsumerFactory(consumerFactory());
		factory.setConcurrency(3);
		// enable batch listening: the listener receives everything returned by one poll
		factory.setBatchListener(true);
		// poll timeout in milliseconds, i.e. how long the consumer waits for records on each poll
		factory.getContainerProperties().setPollTimeout(3000);
		return factory;
	}

	@Bean
	public ConsumerFactory<String, String> consumerFactory() {
		return new DefaultKafkaConsumerFactory<>(kafkaProperties.buildConsumerProperties());
	}

	@Bean("ackContainerFactory")
	public ConcurrentKafkaListenerContainerFactory<String, String> ackContainerFactory() {
		ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
		factory.setConsumerFactory(consumerFactory());
		// manual acknowledgment mode: the offset is committed immediately after the message is processed
		factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE);
		// number of concurrent consumer threads
		factory.setConcurrency(3);
		return factory;
	}

}
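
Neither factory above configures an error handler, so a listener exception falls back to spring-kafka's default behaviour. As an optional extension (a sketch, not part of the original configuration), a DefaultErrorHandler, which replaced SeekToCurrentErrorHandler in spring-kafka 2.8, can retry a failed record with a back-off and then skip it:

java
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.listener.DefaultErrorHandler;
import org.springframework.util.backoff.FixedBackOff;

public class KafkaErrorHandlingSketch {

    // Attach a retry-with-backoff handler to one of the factories defined above,
    // e.g. inside ackContainerFactory() before the factory is returned.
    public static void addErrorHandler(ConcurrentKafkaListenerContainerFactory<String, String> factory) {
        DefaultErrorHandler errorHandler = new DefaultErrorHandler(
                // recoverer: invoked once retries are exhausted; here the record is logged and skipped
                (ConsumerRecord<?, ?> record, Exception ex) ->
                        System.err.println("Giving up on offset " + record.offset() + ": " + ex.getMessage()),
                // retry a failed record up to 3 more times, 1 second apart
                new FixedBackOff(1000L, 3));
        factory.setCommonErrorHandler(errorHandler);
    }
}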

4. Producer

  • Three ways of sending a message are shown below: the first sends without any headers, while the other two set message headers in different ways.
java
@Slf4j
@RestController
@RequestMapping("/kafkaMsg")
public class MessageController {

    @Resource
    private KafkaTemplate<String, String> kafkaTemplate;

    @GetMapping( "/springSend")
    public void send(@RequestParam String message) {
        // send the message
        ListenableFuture<SendResult<String, String>> future = kafkaTemplate.send("highLoadEvent", message);
        // register a callback on the send result
        future.addCallback(new ListenableFutureCallback<SendResult<String, String>>() {
            @Override
            public void onSuccess(SendResult<String, String> result) {
                log.info("Message sent successfully: {}", result);
            }
            @Override
            public void onFailure(Throwable e) {
                log.error("Failed to send message", e);
            }
        });
    }

    @GetMapping( "/springSendHeader1")
    public void sendHeader1(@RequestParam String message) {
        List<Header> headers = new ArrayList<>();
        // add a custom header
        headers.add(new RecordHeader("event", "TSP001".getBytes()));
        // topic, partition, key, value, headers
        ProducerRecord<String, String> record = new ProducerRecord<>("highLoadEvent1", 0, "", message, headers);
        log.info("sending message='{}' to topic='{}'", message, "highLoadEvent1");
        kafkaTemplate.send(record);
    }

    @GetMapping( "/springSendHeader2")
    public void sendHeader2(@RequestParam String message) {
        // a MessageBuilder message must set the topic header; custom headers may also be added
        Message<String> messageBuilder = MessageBuilder.withPayload(message)
                .setHeader(KafkaHeaders.TOPIC, "highLoadEvent2")
                //.setHeader(KafkaHeaders.MESSAGE_KEY, "999")
                //.setHeader(KafkaHeaders.PARTITION_ID, 0)
                .setHeader("event", "TSP002").build();
        log.info("sending messageBuilder='{}' to topic='{}'", message, "highLoadEvent2");
        kafkaTemplate.send(messageBuilder);
    }
}
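
Note that the ListenableFuture callback above matches spring-kafka 2.x, as pinned in section 1. From spring-kafka 3.0 onward, KafkaTemplate.send() returns a CompletableFuture instead, so on a newer version the callback would look roughly like this sketch:

java
import java.util.concurrent.CompletableFuture;

import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;

public class SendCallbackSketch {

    // spring-kafka 3.x only: send() returns CompletableFuture<SendResult<K, V>>
    public static void send(KafkaTemplate<String, String> kafkaTemplate, String message) {
        CompletableFuture<SendResult<String, String>> future = kafkaTemplate.send("highLoadEvent", message);
        future.whenComplete((result, ex) -> {
            if (ex == null) {
                System.out.println("Message sent successfully: " + result.getRecordMetadata());
            } else {
                System.err.println("Failed to send message: " + ex.getMessage());
            }
        });
    }
}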

5. Consumer

java
@Slf4j
@Component
public class SpringKafkaListener {

    @KafkaListener(topics = "highLoadEvent1", groupId = "${commons.kafka.consumer.group-id}")
    public void handle(String message) {
        log.warn("接收到消息: {}", message);
    }

    @KafkaListener(topics = "highLoadEvent2",containerFactory = "ackContainerFactory")
    public void handleMessage(ConsumerRecord<String, String> record, Acknowledgment acknowledgment) {
        try {
            log.warn("正在接收消息: {}", record.value());
        } catch (Exception e) {
            log.error("处理消息时发生错误");
        } finally {
            acknowledgment.acknowledge();
        }
    }
}
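
Two further listener variants may be useful, shown here as a sketch (any topic name or group id not used above is illustrative). Because kafkaListenerContainerFactory in section 3 enables batch listening, a listener bound to it can declare a List payload and receive everything returned by one poll; and the custom "event" header set by the producer in section 4 can be read from the record's headers:

java
import java.nio.charset.StandardCharsets;
import java.util.List;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.header.Header;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

import lombok.extern.slf4j.Slf4j;

@Slf4j
@Component
public class SpringKafkaListenerSketch {

    // Uses the default batch-enabled factory from section 3, so the whole poll arrives as a list.
    @KafkaListener(topics = "highLoadEvent", groupId = "order-service-group")
    public void handleBatch(List<String> messages) {
        log.info("Received a batch of {} messages", messages.size());
    }

    // Reads the custom "event" header set by the producer; Kafka header values are byte arrays.
    @KafkaListener(topics = "highLoadEvent2", groupId = "header-demo-group", containerFactory = "ackContainerFactory")
    public void handleWithHeader(ConsumerRecord<String, String> record, Acknowledgment acknowledgment) {
        Header eventHeader = record.headers().lastHeader("event");
        String event = eventHeader != null ? new String(eventHeader.value(), StandardCharsets.UTF_8) : "n/a";
        log.info("event header: {}, payload: {}", event, record.value());
        acknowledgment.acknowledge();
    }
}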