Flink: Using a Custom Table Function

The example below defines a custom TableFunction, registers it, and calls it in two ways: through the Table API with joinLateral, and through SQL with LATERAL TABLE. The source is a socket stream of comma-separated records.

package cn.edu.tju.demo3;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.*;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.types.Row;

public class Test49 {
    private static String HOST_NAME = "xx.xx.xx.xx";
    private static int PORT = 9999;
    private static String DELIMITER ="\n";


    public static void main(String[] args) throws Exception {

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Read comma-separated lines from the socket and map each line into a DataInfo POJO
        DataStream<String> socketDataInfo = env.socketTextStream(HOST_NAME, PORT, DELIMITER);
        SingleOutputStreamOperator<DataInfo> dataInfoStream = socketDataInfo.map(new MapFunction<String, DataInfo>() {
            @Override
            public DataInfo map(String value) throws Exception {

                String[] stringList = value.split(",");
                DataInfo dataInfo = new DataInfo(Long.parseLong(
                        stringList[0]), stringList[1], Double.parseDouble(stringList[2]));
                return dataInfo;
            }
        });

        // Convert the DataStream into a Table, exposing the POJO fields as columns ts, info, val
        Table dataTable = tableEnv.fromDataStream(dataInfoStream, "ts,info,val");
        // Register the custom table function so it can be used from both the Table API and SQL
        tableEnv.registerFunction("myTableFunction", new MyTableFunction());
        // Table API: apply the table function to each row via a lateral join; its two output fields are aliased a and b
        Table resultTable = dataTable.select("ts,info,val")
                .joinLateral("myTableFunction(val) as(a,b)")
                .select("ts,info,a,b");

        // SQL: register the table as a view and call the same function via LATERAL TABLE, aliasing its output columns as a and b
        tableEnv.createTemporaryView("dataInfo", dataTable);

        Table resultTableSql = tableEnv.sqlQuery(
                "select ts,info,val,a,b from dataInfo, LATERAL TABLE(myTableFunction(val)) as res (a,b)"
        );
        

        // Both result tables are append-only, so convert them back to DataStreams and print;
        // the SQL result is printed with the "sql" prefix
        tableEnv.toAppendStream(resultTable, Row.class).print();
        tableEnv.toAppendStream(resultTableSql, Row.class).print("sql");

        env.execute("my job");

    }

    public static class DataInfo{
        private long ts;
        private String info;
        private double val;

        public long getTs() {
            return ts;
        }

        public void setTs(long ts) {
            this.ts = ts;
        }

        public String getInfo() {
            return info;
        }

        public void setInfo(String info) {
            this.info = info;
        }

        public double getVal() {
            return val;
        }

        public void setVal(double val) {
            this.val = val;
        }

        @Override
        public String toString() {
            return "DataInfo{" +
                    "ts=" + ts +
                    ", info='" + info + '\'' +
                    ", val=" + val +
                    '}';
        }

        public DataInfo(long ts, String info, double val) {
            this.ts = ts;
            this.info = info;
            this.val = val;
        }

        public DataInfo() {

        }
    }

    // A custom table function must define at least one public method named eval; each collect() call emits one output row
    public static class MyTableFunction extends TableFunction<Tuple2<Double, Double>>{
        public void eval(double d){
            collect(new Tuple2<>(d, Math.floor(d)));
        }
    }
}
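
For reference: registerFunction and the string-based joinLateral/select expressions used above are deprecated in recent Flink releases. A minimal sketch of the same table function written against the newer, annotation-based API might look like the following (MyRowTableFunction is an illustrative name, not from the original code; this assumes roughly Flink 1.11+ with Expressions.call and $):

import static org.apache.flink.table.api.Expressions.$;
import static org.apache.flink.table.api.Expressions.call;

import org.apache.flink.table.annotation.DataTypeHint;
import org.apache.flink.table.annotation.FunctionHint;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.types.Row;

// The FunctionHint declares the output schema explicitly, so the planner knows the emitted columns a and b
@FunctionHint(output = @DataTypeHint("ROW<a DOUBLE, b DOUBLE>"))
public static class MyRowTableFunction extends TableFunction<Row> {
    public void eval(double d) {
        // one output row per input value: the value itself and its floor
        collect(Row.of(d, Math.floor(d)));
    }
}

// registration and call (inside main(), after dataTable is created):
// tableEnv.createTemporarySystemFunction("myTableFunction", MyRowTableFunction.class);
// Table resultTable = dataTable
//         .joinLateral(call("myTableFunction", $("val")))
//         .select($("ts"), $("info"), $("a"), $("b"));

Because the output schema is declared in the @FunctionHint, the columns a and b can be referenced directly, without the string-based as(a,b) aliasing used in the original listing.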

Start a netcat listener on port 9999 so the job has a socket source to connect to:

nc -lk 9999

Input:

1690000001,ffff,87.12

Execution result
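
For the sample line above, the two print sinks should produce output along these lines (derived from the code, not a captured run; the exact Row formatting and the "sql" prefix depend on the Flink version):

1690000001,ffff,87.12,87.0
sql> 1690000001,ffff,87.12,87.12,87.0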
