kafka 快速上手

下载 Apache Kafka

演示 Windows 安装

编写启动脚本,脚本的路径根据自己实际的来

启动说明

先启动zookeeper后启动kafka,关闭是先关kafka,然后关闭zookeeper

巧记: 铲屎官(zookeeper)总是第一个到,最后一个走

启动zookeeper

call bin/windows/zookeeper-server-start.bat config/zookeeper.properties

启动kafka

call bin/windows/kafka-server-start.bat config/server.properties

测试脚本,主要用于创建主题 'test-topic'

shell 复制代码
# 创建主题(窗口1)
bin/windows> kafka-topics.bat --bootstrap-server localhost:9092 --topic test-topic  --create

# 查看主题
bin/windows> kafka-topics.bat --bootstrap-server localhost:9092 --list
bin/windows> kafka-topics.bat --bootstrap-server localhost:9092 --topic test-topic --describe

# 修改某主题的分区
bin/windows> kafka-topics.bat --bootstrap-server localhost:9092 --topic test-topic --alter --partitions 2


# 生产消息(窗口2)向test-topic主题发送消息
bin/windows> kafka-console-producer.bat --bootstrap-server localhost:9092 --topic test-topic
>hello kafka

# 消费消息(窗口3)消费test-topic主题的消息
bin/windows> kafka-console-consumer.bat --bootstrap-server localhost:9092 --topic test-topic
java 复制代码
package com.ldj.kafka.admin;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.CreateTopicsResult;
import org.apache.kafka.clients.admin.NewTopic;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

/**
 * User: ldj
 * Date: 2024/6/13
 * Time: 0:00
 * Description: 创建主题
 */
public class AdminTopic {

    /**
     * Creates two demo topics ("topic-01", "topic-02") on a local broker
     * via the Kafka AdminClient and waits for the broker to confirm.
     */
    public static void main(String[] args) {
        // Admin client configuration: only the broker address is required here.
        Map<String, Object> adminConfigMap = new HashMap<>();
        adminConfigMap.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

        // NewTopic(name, numPartitions, replicationFactor)
        NewTopic topic1 = new NewTopic("topic-01", 1, (short) 1);
        NewTopic topic2 = new NewTopic("topic-02", 2, (short) 1);

        // try-with-resources guarantees the client is closed even on failure.
        try (AdminClient adminClient = AdminClient.create(adminConfigMap)) {
            CreateTopicsResult addResult = adminClient.createTopics(Arrays.asList(topic1, topic2));

            // createTopics() is asynchronous: without waiting on the returned
            // futures, failures (e.g. topic already exists, broker down) are
            // silently lost. Block until the broker confirms creation.
            addResult.all().get();
            System.out.println("Topics created successfully");

            //DeleteTopicsResult delResult = adminClient.deleteTopics(Arrays.asList("topic-02"));
        } catch (Exception e) {
            // main() is the process boundary, so a broad catch is acceptable here.
            Thread.currentThread().interrupt(); // harmless if not interrupted; restores flag if it was
            System.err.println("Topic creation failed: " + e);
        }
    }

}
java 复制代码
package com.ldj.kafka.producer;

import com.alibaba.fastjson.JSON;
import com.ldj.kafka.model.UserEntity;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

/**
 * User: ldj
 * Date: 2024/6/12
 * Time: 21:08
 * Description: 生产者
 */
public class KfkProducer {

    /**
     * Sends 10 JSON-serialized UserEntity messages to "test-topic",
     * keyed by user id, reporting per-record success/failure via callback.
     */
    public static void main(String[] args) {

        //生产者配置: broker address and key/value serializers
        Map<String, Object> producerConfigMap = new HashMap<>();
        producerConfigMap.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        producerConfigMap.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        producerConfigMap.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

        //创建生产者 — try-with-resources: close() flushes any buffered records
        //and releases network resources even if the loop throws.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(producerConfigMap)) {

            //构建消息 ProducerRecord(String topic, Integer partition, Long timestamp, K key, V value, Iterable<Header> headers)
            for (int i = 0; i < 10; i++) {
                UserEntity userEntity = new UserEntity()
                        .setUserId(2436687942335620L + i)
                        .setUsername("lisi")
                        .setGender(1)
                        .setAge(18);

                ProducerRecord<String, String> record = new ProducerRecord<>(
                        "test-topic",
                        userEntity.getUserId().toString(),
                        JSON.toJSONString(userEntity));

                //发送数据到Broker — asynchronous; the callback runs on the
                //producer's I/O thread once the broker acks (or the send fails).
                producer.send(record, (RecordMetadata metadata, Exception exception) -> {
                    if (Objects.isNull(exception)) {
                        // %n: without a newline, all callback output ran together on one line
                        System.out.printf("[%s]消息发送成功!%n", userEntity.getUserId());
                    } else {
                        // Log the exception itself, not getCause(): the cause may be
                        // null, which would hide the actual error message.
                        System.out.printf("[%s]消息发送失败!err:%s%n", userEntity.getUserId(), exception);
                    }
                });
            }
        }
    }

}
java 复制代码
package com.ldj.kafka.consumer;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/**
 * User: ldj
 * Date: 2024/6/12
 * Time: 21:10
 * Description: 消费者
 */
public class KfkConsumer {

    /**
     * Polls "test-topic" as part of consumer group "test123456" and prints
     * each record's value. Runs until the process is terminated.
     */
    public static void main(String[] args) {

        //消费者配置: broker address, key/value deserializers
        Map<String, Object> consumerConfigMap = new HashMap<>();
        consumerConfigMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        consumerConfigMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        consumerConfigMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        //所属消费组 — consumers sharing this group id split the topic's partitions
        consumerConfigMap.put(ConsumerConfig.GROUP_ID_CONFIG, "test123456");

        //创建消费者 — try-with-resources: close() commits offsets (per config)
        //and leaves the group cleanly even if polling throws.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(consumerConfigMap)) {

            //消费主题的消息  ConsumerRebalanceListener
            consumer.subscribe(Collections.singletonList("test-topic"));

            //Poll loop: each call blocks up to 5s waiting for new records.
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));
                //数据存储结构:Map<TopicPartition, List<ConsumerRecord<K, V>>> records;
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.value());
                }
            }
        }
    }

}
相关推荐
Mephisto.java36 分钟前
【大数据学习 | kafka高级部分】kafka的kraft集群
大数据·sql·oracle·kafka·json·hbase
Mephisto.java38 分钟前
【大数据学习 | kafka高级部分】kafka的文件存储原理
大数据·sql·oracle·kafka·json
yx9o1 小时前
Kafka 源码 KRaft 模式本地运行
分布式·kafka
java1234_小锋9 小时前
讲讲RabbitMQ 性能优化
kafka
码农爱java16 小时前
Kafka 之消息并发消费
spring boot·微服务·kafka·mq·消息中间件·并发消费
Mephisto.java20 小时前
【大数据学习 | kafka高级部分】kafka的快速读写
大数据·redis·sql·kafka·flume
Mephisto.java1 天前
【大数据学习 | kafka高级部分】文件清除原理
大数据·hadoop·zookeeper·spark·kafka·hbase·flume
0_1_bits1 天前
【系统设计】高效的分布式系统:使用 Spring Boot 和 Kafka 实现 Saga 模式
spring boot·后端·云原生·架构·kafka·linq
不想睡觉的橘子君1 天前
【MQ】RabbitMQ、RocketMQ、kafka特性对比
kafka·rabbitmq·rocketmq
码农爱java2 天前
Kafka 之顺序消息
spring boot·分布式·微服务·kafka·mq·消息中间件·顺序消息