Flink之Kafka Sink

  • 代码内容
代码示例(Java):
package com.jin.demo;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.kafka.clients.producer.ProducerConfig;

import java.util.Properties;

/**
 * @Author: J
 * @Version: 1.0
 * @CreateTime: 2023/6/29
 * @Description: 测试
 **/
/**
 * Demonstrates writing a stream to Kafka via the (new) {@code KafkaSink} connector API.
 *
 * <p>Reads from a custom test source, maps each bean to its string form, and sinks the
 * strings to topic {@code tpc-02} with AT_LEAST_ONCE delivery.
 */
public class FlinkKafkaSink {
    public static void main(String[] args) throws Exception {
        // Create the streaming execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps the demo output ordered and easy to read
        env.setParallelism(1);
        // Add the data source (CustomizeSource is a custom test source defined elsewhere)
        SingleOutputStreamOperator<String> mapStream =
                env.addSource(new CustomizeSource()).map(bean -> bean.toString());
        // Producer transaction timeout. Only relevant for transactional (EXACTLY_ONCE)
        // writes; must be <= the broker's transaction.max.timeout.ms.
        Properties properties = new Properties();
        properties.setProperty(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, "10000");
        // Build the KafkaSink
        KafkaSink<String> kafkaSink = KafkaSink.<String>builder()
                // Kafka bootstrap servers
                .setBootstrapServers("lx01:9092")
                // Record serializer: topic + value serialization
                .setRecordSerializer(KafkaRecordSerializationSchema.<String>builder()
                        // Target Kafka topic
                        .setTopic("tpc-02")
                        // Serialize the String value as UTF-8 bytes
                        .setValueSerializationSchema(new SimpleStringSchema())
                        .build()
                )
                // Delivery semantics. NOTE: newer Flink releases rename this method to
                // setDeliveryGuarantee(...) — switch over when upgrading.
                .setDeliverGuarantee(DeliveryGuarantee.AT_LEAST_ONCE)
                // FIX: the properties object was previously built but never handed to the
                // builder, so transaction.timeout.ms silently had no effect.
                .setKafkaProducerConfig(properties)
                // Transactional id prefix (used only when EXACTLY_ONCE is enabled; harmless
                // here, but required if the guarantee above is ever tightened)
                .setTransactionalIdPrefix("JL-")
                .build();
        // Write the mapped stream to Kafka
        mapStream.sinkTo(kafkaSink);
        env.execute("Kafka Sink");
    }
}

结果数据

控制台消费输出(PowerShell/Shell):
[root@lx01 bin]# ./kafka-console-consumer.sh --bootstrap-server lx01:9092 --topic tpc-02
CustomizeBean(name=AAA-274, age=64, gender=W, hobbit=钓鱼爱好者)
CustomizeBean(name=AAA-973, age=45, gender=W, hobbit=钓鱼爱好者)
CustomizeBean(name=AAA-496, age=71, gender=W, hobbit=非遗文化爱好者)
CustomizeBean(name=AAA-263, age=45, gender=M, hobbit=天文知识爱好者)
CustomizeBean(name=AAA-790, age=77, gender=W, hobbit=书法爱好者)
CustomizeBean(name=AAA-806, age=38, gender=M, hobbit=非遗文化爱好者)
CustomizeBean(name=AAA-498, age=58, gender=M, hobbit=篮球运动爱好者)
CustomizeBean(name=AAA-421, age=63, gender=M, hobbit=书法爱好者)
CustomizeBean(name=AAA-938, age=56, gender=W, hobbit=乒乓球运动爱好者)
CustomizeBean(name=AAA-278, age=18, gender=M, hobbit=乒乓球运动爱好者)
CustomizeBean(name=AAA-614, age=74, gender=W, hobbit=钓鱼爱好者)
CustomizeBean(name=AAA-249, age=67, gender=W, hobbit=天文知识爱好者)
CustomizeBean(name=AAA-690, age=72, gender=W, hobbit=网吧战神)
CustomizeBean(name=AAA-413, age=69, gender=M, hobbit=美食爱好者)
相关推荐
c&0xff008 小时前
Flink反压问题
网络·flink
在未来等你10 小时前
Kafka面试精讲 Day 8:日志清理与数据保留策略
大数据·分布式·面试·kafka·消息队列
江畔独步11 小时前
Flink TaskManager日志时间与实际时间有偏差
大数据·flink
echoyu.11 小时前
消息队列-初识kafka
java·分布式·后端·spring cloud·中间件·架构·kafka
cg.family15 小时前
Doris 消费kafka消息
kafka·doris
趴着喝可乐16 小时前
openEuler2403安装部署Kafka
kafka·openeuler
大数据点灯人16 小时前
【Flink】Flink Runtime 开发指南
大数据·flink
掘金-我是哪吒20 小时前
分布式微服务系统架构第170集:Kafka消费者并发-多节点消费-可扩展性
分布式·微服务·架构·kafka·系统架构
何双新20 小时前
第 3 讲:KAFKA生产者(Producer)详解
分布式·kafka·linq
像豆芽一样优秀21 小时前
Hive和Flink数据倾斜问题
大数据·数据仓库·hive·hadoop·flink