Kafka Quick Start

Download Apache Kafka

This walkthrough demonstrates installation on Windows.

Write the startup scripts; adjust the paths to match your actual installation.

Startup notes

Start ZooKeeper first, then Kafka. Shut down in the reverse order: stop Kafka first, then ZooKeeper.

Mnemonic: the zookeeper (ZooKeeper) is always the first to arrive and the last to leave.

Start ZooKeeper:

call bin/windows/zookeeper-server-start.bat config/zookeeper.properties

Start Kafka:

call bin/windows/kafka-server-start.bat config/server.properties
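If you prefer a single double-clickable launcher, the two call lines above can be wrapped in one batch file that starts ZooKeeper first and the broker after a short delay. A minimal sketch (the file name start-all.bat and the 10-second wait are arbitrary choices; adjust to your setup):

@echo off
rem Sketch: save as start-all.bat in the Kafka installation root; adjust paths as needed
start "zookeeper" bin\windows\zookeeper-server-start.bat config\zookeeper.properties
rem Give ZooKeeper a few seconds to come up before launching the broker
timeout /t 10 /nobreak
start "kafka" bin\windows\kafka-server-start.bat config\server.properties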

Test commands, mainly used to create the topic 'test-topic' and try it out from the console:

# Create a topic (window 1)
bin/windows> kafka-topics.bat --bootstrap-server localhost:9092 --topic test-topic --create

# List / describe topics
bin/windows> kafka-topics.bat --bootstrap-server localhost:9092 --list
bin/windows> kafka-topics.bat --bootstrap-server localhost:9092 --topic test-topic --describe

# Alter the partition count of a topic (note: Kafka only allows increasing it)
bin/windows> kafka-topics.bat --bootstrap-server localhost:9092 --topic test-topic --alter --partitions 2


# Produce messages (window 2): send messages to the test-topic topic
bin/windows> kafka-console-producer.bat --bootstrap-server localhost:9092 --topic test-topic
>hello kafka

# Consume messages (window 3): read messages from the test-topic topic
# (add --from-beginning to replay messages produced before the consumer started)
bin/windows> kafka-console-consumer.bat --bootstrap-server localhost:9092 --topic test-topic
Java example: creating topics with the AdminClient API.
package com.ldj.kafka.admin;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.CreateTopicsResult;
import org.apache.kafka.clients.admin.NewTopic;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

/**
 * User: ldj
 * Date: 2024/6/13
 * Time: 0:00
 * Description: create topics
 */
public class AdminTopic {

    public static void main(String[] args) {
        Map<String, Object> adminConfigMap = new HashMap<>();
        adminConfigMap.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");

        // NewTopic(name, numPartitions, replicationFactor)
        NewTopic topic1 = new NewTopic("topic-01", 1, (short) 1);
        NewTopic topic2 = new NewTopic("topic-02", 2, (short) 1);

        AdminClient adminClient = AdminClient.create(adminConfigMap);
        CreateTopicsResult addResult = adminClient.createTopics(Arrays.asList(topic1, topic2));

        //DeleteTopicsResult delResult = adminClient.deleteTopics(Arrays.asList("topic-02"));

        // close() waits for in-flight requests to finish before releasing resources
        adminClient.close();
    }

}
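Note that createTopics is asynchronous: the CreateTopicsResult above only holds futures, and the code relies on close() waiting for in-flight requests. To confirm creation explicitly, or to catch an existing-topic error, you can block on the result before closing. A minimal sketch that would slot in just before adminClient.close():

// Sketch: wait for the broker to acknowledge topic creation
try {
    addResult.all().get(); // KafkaFuture<Void>; throws ExecutionException if creation failed
} catch (java.util.concurrent.ExecutionException e) {
    // e.g. TopicExistsException when a topic with the same name already exists
    System.out.println("topic creation failed: " + e.getCause());
} catch (InterruptedException e) {
    Thread.currentThread().interrupt();
}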
Java example: the producer.
package com.ldj.kafka.producer;

import com.alibaba.fastjson.JSON;
import com.ldj.kafka.model.UserEntity;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

/**
 * User: ldj
 * Date: 2024/6/12
 * Time: 21:08
 * Description: producer
 */
public class KfkProducer {

    public static void main(String[] args) {

        // Producer configuration
        Map<String, Object> producerConfigMap = new HashMap<>();
        producerConfigMap.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        producerConfigMap.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        producerConfigMap.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

        // Create the producer
        KafkaProducer<String, String> producer = new KafkaProducer<>(producerConfigMap);

        // Build messages - full constructor: ProducerRecord(String topic, Integer partition, Long timestamp, K key, V value, Iterable<Header> headers)
        try {
            for (int i = 0; i < 10; i++) {
                UserEntity userEntity = new UserEntity()
                        .setUserId(2436687942335620L + i)
                        .setUsername("lisi")
                        .setGender(1)
                        .setAge(18);

                ProducerRecord<String, String> record = new ProducerRecord<>(
                        "test-topic",
                        userEntity.getUserId().toString(),
                        JSON.toJSONString(userEntity));

                // Send the record to the broker; the callback fires once the send completes
                producer.send(record, (RecordMetadata metadata, Exception exception) -> {
                    if (Objects.isNull(exception)) {
                        System.out.printf("[%s] message sent successfully!%n", userEntity.getUserId());
                    } else {
                        System.out.printf("[%s] message send failed! err: %s%n", userEntity.getUserId(), exception.getCause());
                    }
                });
            }
        } finally {
            // Close the producer; this flushes any buffered records
            producer.close();
        }
    }

}
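The producer serializes a UserEntity (from com.ldj.kafka.model, not shown in the post) to JSON with fastjson. Judging by the chained setters used above, it is presumably a simple POJO with fluent setters, along these lines (a sketch, not the original class):

package com.ldj.kafka.model;

/**
 * Sketch of the UserEntity model assumed by the producer example;
 * the original class is not shown in the post.
 */
public class UserEntity {
    private Long userId;
    private String username;
    private Integer gender;
    private Integer age;

    // Fluent setters (return this) so calls can be chained as in the producer
    public UserEntity setUserId(Long userId) { this.userId = userId; return this; }
    public UserEntity setUsername(String username) { this.username = username; return this; }
    public UserEntity setGender(Integer gender) { this.gender = gender; return this; }
    public UserEntity setAge(Integer age) { this.age = age; return this; }

    public Long getUserId() { return userId; }
    public String getUsername() { return username; }
    public Integer getGender() { return gender; }
    public Integer getAge() { return age; }
}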
Java example: the consumer.
package com.ldj.kafka.consumer;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/**
 * User: ldj
 * Date: 2024/6/12
 * Time: 21:10
 * Description: consumer
 */
public class KfkConsumer {

    public static void main(String[] args) {

        // Consumer configuration
        Map<String, Object> consumerConfigMap = new HashMap<>();
        consumerConfigMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        consumerConfigMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        consumerConfigMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        // Consumer group this consumer belongs to
        consumerConfigMap.put(ConsumerConfig.GROUP_ID_CONFIG, "test123456");

        // Create the consumer
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(consumerConfigMap);

        // Subscribe to the topic (an overload also accepts a ConsumerRebalanceListener; see the sketch after this class)
        consumer.subscribe(Collections.singletonList("test-topic"));

        try {
            while (true) {
                // poll() returns the records fetched within the timeout;
                // internally they are grouped as Map<TopicPartition, List<ConsumerRecord<K, V>>>
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.value());
                }
            }
        } finally {
            // Close the consumer (unreachable here since the loop runs forever,
            // but kept so the consumer is released if the loop ever exits)
            consumer.close();
        }
    }

}
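The original code's comment mentions ConsumerRebalanceListener: subscribe has an overload that takes one, letting you react when partitions are reassigned within the group (for example, to commit offsets before losing a partition). A minimal logging-only sketch that would replace the plain subscribe call above (it additionally needs imports for ConsumerRebalanceListener, org.apache.kafka.common.TopicPartition, and java.util.Collection):

// Sketch: subscribe with a rebalance listener instead of the plain subscribe above
consumer.subscribe(Collections.singletonList("test-topic"), new ConsumerRebalanceListener() {
    @Override
    public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
        // Called before partitions are taken away (a good place to commit offsets)
        System.out.println("partitions revoked: " + partitions);
    }

    @Override
    public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
        // Called after partitions have been assigned to this consumer
        System.out.println("partitions assigned: " + partitions);
    }
});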