Getting Started with Kafka

  • A producer sends a message and, among multiple consumers, only one of them receives it (point-to-point model: the consumers share one consumer group)

  • A producer sends a message and all consumers receive it (publish/subscribe model: each consumer belongs to its own consumer group)

Which behavior you get is determined by the consumers' group.id: consumers sharing a group split the topic's partitions between them, so each record is delivered to exactly one member, while consumers in different groups each receive every record. You can observe both cases with the quick-start code below by running two copies of the consumer with the same or different group settings.

(1) Create the kafka-demo project and add the dependency

xml
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>3.4.0</version>
</dependency>

(2) The producer sends messages

java
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;

/**
 * Producer
 */
public class ProducerQuickStart {

    public static void main(String[] args) {
        //1. Kafka configuration
        Properties properties = new Properties();
        // Kafka broker address
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,"192.168.140.100:9092");
        // Number of retries when a send fails
        properties.put(ProducerConfig.RETRIES_CONFIG,5);
        // Serializer for message keys
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,"org.apache.kafka.common.serialization.StringSerializer");
        // Serializer for message values
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,"org.apache.kafka.common.serialization.StringSerializer");

        //2. Create the producer
        KafkaProducer<String,String> producer = new KafkaProducer<>(properties);

        // Build the record to send: topic, key, value
        ProducerRecord<String,String> record = new ProducerRecord<>("green-topic","100001","hello kafka");

        //3. Send the message (asynchronous, fire-and-forget here)
        producer.send(record);

        //4. Close the producer; closing flushes buffered records, otherwise the message may never actually be sent
        producer.close();
    }

}
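The send() call above is asynchronous and fire-and-forget: it returns a Future<RecordMetadata> that is simply ignored. If delivery needs to be confirmed, you can block on the future or register a callback. A minimal sketch reusing the producer and record from above (note that get() throws checked exceptions that real code must handle or declare):

java
import org.apache.kafka.clients.producer.RecordMetadata;

// Synchronous send: block until the broker acknowledges the record.
RecordMetadata metadata = producer.send(record).get();
System.out.println("partition=" + metadata.partition() + ", offset=" + metadata.offset());

// Asynchronous send: the callback runs when the send completes or fails.
producer.send(record, (meta, exception) -> {
    if (exception != null) {
        exception.printStackTrace();
    } else {
        System.out.println("acknowledged at offset " + meta.offset());
    }
});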

(3) The consumer receives messages

java
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

/**
 * Consumer
 */
public class ConsumerQuickStart {

    public static void main(String[] args) {
        //1. Kafka configuration
        Properties properties = new Properties();
        // Kafka broker address
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.140.100:9092");
        // Consumer group
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "group1");
        // Deserializers for message keys and values
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");

        //2. Create the consumer
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties);

        //3. Subscribe to the topic
        consumer.subscribe(Collections.singletonList("green-topic"));

        // Keep polling; each poll blocks for up to one second
        while (true) {
            //4. Fetch records
            ConsumerRecords<String, String> consumerRecords = consumer.poll(Duration.ofMillis(1000));
            for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
                System.out.println(consumerRecord.key());
                System.out.println(consumerRecord.value());
            }
        }

    }

}
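By default the consumer auto-commits offsets in the background (enable.auto.commit is true), so a crash between polls can lose or re-read messages. If that matters, commit manually after processing. A sketch of only the parts that change, everything else as in the class above:

java
// Disable auto-commit so offsets advance only after records are processed.
properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");

while (true) {
    ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
    for (ConsumerRecord<String, String> record : records) {
        System.out.println(record.key() + " -> " + record.value());
    }
    if (!records.isEmpty()) {
        // Synchronously commit the offsets of the batch just processed.
        consumer.commitSync();
    }
}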

Integrating Kafka with Spring Boot

1. Add the spring-kafka dependencies

xml
<dependencies>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-web</artifactId>
    </dependency>
    <!-- kafka -->
    <dependency>
        <groupId>org.springframework.kafka</groupId>
        <artifactId>spring-kafka</artifactId>
        <exclusions>
            <exclusion>
                <groupId>org.apache.kafka</groupId>
                <artifactId>kafka-clients</artifactId>
            </exclusion>
        </exclusions>
    </dependency>
    <dependency>
        <groupId>org.apache.kafka</groupId>
        <artifactId>kafka-clients</artifactId>
    </dependency>
    <dependency>
        <groupId>com.alibaba</groupId>
        <artifactId>fastjson</artifactId>
    </dependency>
</dependencies>
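One caveat: spring-boot-starter-web, spring-kafka, and kafka-clients all get their versions from Spring Boot's dependency management (assuming the project inherits from spring-boot-starter-parent), but fastjson is not in the Spring Boot BOM, so a version normally has to be declared explicitly. The version below is only an example:

xml
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>fastjson</artifactId>
    <version>1.2.83</version>
</dependency>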

2. Create application.yml under resources

yaml
server:
  port: 9991
spring:
  application:
    name: kafka-demo
  kafka:
    bootstrap-servers: 192.168.140.100:9092
    producer:
      retries: 10
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    consumer:
      group-id: ${spring.application.name}-test
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer

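With these properties in place, Spring Boot auto-configures a KafkaTemplate and the @KafkaListener container infrastructure from application.yml, so this example needs no manual @Bean definitions.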
3. Message producer

java
import com.alibaba.fastjson.JSON;
import com.heima.kafka.pojo.User;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class HelloController {

    @Autowired
    private KafkaTemplate<String,String> kafkaTemplate;

    @GetMapping("/hello")
    public String hello(){
        kafkaTemplate.send("green-topic","hello kafka...");
        return "ok";
    }

    // Send an object as the message payload, serialized to JSON
    @GetMapping("/hello2")
    public String hello2(){
        User user = new User();
        user.setUsername("小明");
        user.setAge(18);
        kafkaTemplate.send("green-topic2", JSON.toJSONString(user));
        return "ok2";
    }
}
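The User class from com.heima.kafka.pojo is not shown in the original; a minimal version consistent with how it is used here (field names inferred from the setters, toString added so the consumer's println prints something readable):

java
package com.heima.kafka.pojo;

public class User {

    private String username;
    private Integer age;

    public String getUsername() { return username; }
    public void setUsername(String username) { this.username = username; }
    public Integer getAge() { return age; }
    public void setAge(Integer age) { this.age = age; }

    @Override
    public String toString() {
        return "User{username='" + username + "', age=" + age + "}";
    }
}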

4. Message consumer

java
import com.alibaba.fastjson.JSON;
import com.heima.kafka.pojo.User;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;

@Component
public class HelloListener {

    @KafkaListener(topics = "green-topic")
    public void onMessage(String message){
        if(!StringUtils.isEmpty(message)){
            System.out.println(message);
        }
    }

    @KafkaListener(topics = "green-topic2")
    public void onMessage2(String message){
        if(!StringUtils.isEmpty(message)){
            System.out.println(JSON.parseObject(message, User.class));
        }

    }
}
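To try it out, start Kafka and the application, then call http://localhost:9991/hello and http://localhost:9991/hello2; the listener's console should print "hello kafka..." and the parsed User object respectively.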