Syncing MySQL data to Kafka with Flink SQL (each job can only sync one table)

java
package flink;


import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;


public class FlinkSQL_CDC {

    public static void main(String[] args) throws Exception {

//        Optional: pin the local REST/Web UI port by passing a Configuration:
//        Configuration conf = new Configuration();
//        conf.setInteger("rest.port",3335);
//        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);

        // 1. Create the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
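        // Parallelism 1 keeps the demo simple and the printed changelog easy to follow.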
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);


        // 2. Create the Flink MySQL CDC source table
        TableResult tableResult = tableEnv.executeSql("CREATE TABLE table_name (" +
                "  id INT PRIMARY KEY NOT ENFORCED," +
                "  name STRING" +
                ") WITH (" +
                "  'connector' = 'mysql-cdc'," +
                "  'hostname' = 'hadoop102'," +
                "  'port' = '3306'," +
                "  'username' = 'root'," +
                "  'password' = 'xxxx'," +
                "  'database-name' = 'student'," +
                "  'table-name' = 'table_name'," +
                "  'server-time-zone' = 'Asia/Shanghai'," +
                "  'scan.startup.mode' = 'initial'" +
                ")"
        );
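        // 'scan.startup.mode' = 'initial' first takes a full snapshot of the table and then
        // switches to reading the MySQL binlog, so both the existing rows and every later
        // change are captured; 'server-time-zone' should match the MySQL server's time zone.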

        // Alternative sink (kept commented out): plain 'kafka' connector, which is append-only
//        tableEnv.executeSql("" +
//                "CREATE TABLE kafka_binlog ( " +
//                "  user_id INT, " +
//                "  user_name STRING, " +
//                "`proc_time` as PROCTIME()" +
//                ") WITH ( " +
//                "  'connector' = 'kafka', " +
//                "  'topic' = 'test2', " +
//                "  'properties.bootstrap.servers' = 'hadoop102:9092', " +
//                "  'format' = 'json' " +
//                ")" +
//                "");

        // 3. Register the sink table: upsert-kafka
        tableEnv.executeSql("" +
                "CREATE TABLE kafka_binlog ( " +
                "  user_id INT, " +
                "  user_name STRING, " +
                "`proc_time` as PROCTIME()," +
                "  PRIMARY KEY (user_id) NOT ENFORCED" +
                ") WITH ( " +
                "  'connector' = 'upsert-kafka', " +
                "  'topic' = 'test2', " +
                "  'properties.bootstrap.servers' = 'hadoop102:9092', " +
                "  'key.format' = 'json' ," +
                "  'value.format' = 'json' " +
                ")" +
                "");


        // 4. Query the source table and write its changelog into the sink
        tableEnv.executeSql("insert into kafka_binlog select * from table_name");

        // Preview what lands in Kafka (this streaming query blocks and keeps printing)
        tableEnv.executeSql("select * from kafka_binlog").print();

        // executeSql() already submits the SQL jobs, so env.execute() is not needed here;
        // with no DataStream operators defined it would only throw an exception.
        // env.execute();
    }

}
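
Because each job syncs exactly one table, a practical way to reuse this code is to pass the database, table, and topic names in as program arguments and submit the same jar once per table. The sketch below only illustrates that idea; the class name, argument names, and defaults are made up for the example and are not part of the original job.

java

package flink;

import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

// Hypothetical variant of FlinkSQL_CDC: one submission per table, driven by program arguments.
public class FlinkSQL_CDC_Param {

    public static void main(String[] args) {
        ParameterTool params = ParameterTool.fromArgs(args);
        String db = params.get("database", "student");
        String table = params.getRequired("table");
        String topic = params.get("topic", table);

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Source: same mysql-cdc options as above, with the table name substituted in.
        tableEnv.executeSql(
                "CREATE TABLE cdc_source (" +
                "  id INT PRIMARY KEY NOT ENFORCED," +
                "  name STRING" +
                ") WITH (" +
                "  'connector' = 'mysql-cdc'," +
                "  'hostname' = 'hadoop102'," +
                "  'port' = '3306'," +
                "  'username' = 'root'," +
                "  'password' = 'xxxx'," +
                "  'database-name' = '" + db + "'," +
                "  'table-name' = '" + table + "'," +
                "  'server-time-zone' = 'Asia/Shanghai'," +
                "  'scan.startup.mode' = 'initial'" +
                ")");

        // Sink: upsert-kafka, with the topic named after the table by default.
        tableEnv.executeSql(
                "CREATE TABLE kafka_sink (" +
                "  user_id INT," +
                "  user_name STRING," +
                "  PRIMARY KEY (user_id) NOT ENFORCED" +
                ") WITH (" +
                "  'connector' = 'upsert-kafka'," +
                "  'topic' = '" + topic + "'," +
                "  'properties.bootstrap.servers' = 'hadoop102:9092'," +
                "  'key.format' = 'json'," +
                "  'value.format' = 'json'" +
                ")");

        // executeSql submits the job directly; no env.execute() is needed for a pure SQL pipeline.
        tableEnv.executeSql("INSERT INTO kafka_sink SELECT id, name FROM cdc_source");
    }
}

Submitted, for example, as: flink run -c flink.FlinkSQL_CDC_Param cdc-job.jar --table table_name --topic test2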