39. Flink Window Function (WindowFunction) Examples

java
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.KeyedStateStore;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;

public class _06_WindowFunction {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        DataStreamSource<String> input = env.socketTextStream("localhost", 8888);

        // ReduceFunction
        input.keyBy(e -> e)
                .window(TumblingProcessingTimeWindows.of(Duration.ofSeconds(5)))
                .reduce(new ReduceFunction<String>() {
                    @Override
                    public String reduce(String v1, String v2) {
                        return v1 + "-" + v2;
                    }
                })
                .print();

        // AggregateFunction
        input.keyBy(e -> e)
                .window(TumblingProcessingTimeWindows.of(Duration.ofSeconds(5)))
                .aggregate(new MyAggregateFunction())
                .print();

        // ProcessWindowFunction (event-time window): this pipeline only fires once
        // watermarks advance past the window end; see the sketch right below it
        input
                .keyBy(e -> e)
                .window(TumblingEventTimeWindows.of(Duration.ofSeconds(5)))
                .process(new MyProcessWindowFunction())
                .print();
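        // Added sketch (assumption, not part of the original listing): on a raw socket stream
        // no timestamps or watermarks are assigned, so the event-time pipeline above never
        // fires. One way to make it observable is to treat arrival time as the event time,
        // purely for illustration; WatermarkStrategy is fully qualified here to leave the
        // original import list untouched.
        input
                .assignTimestampsAndWatermarks(
                        org.apache.flink.api.common.eventtime.WatermarkStrategy
                                .<String>forMonotonousTimestamps()
                                .withTimestampAssigner((element, recordTimestamp) -> System.currentTimeMillis()))
                .keyBy(e -> e)
                .window(TumblingEventTimeWindows.of(Duration.ofSeconds(5)))
                .process(new MyProcessWindowFunction())
                .print();
        // A window still only fires after a later element pushes the watermark past its end.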

        // ProcessWindowFunction with incremental aggregation
        // Incremental aggregation with a ReduceFunction
        input
                .keyBy(e -> e)
                .window(TumblingProcessingTimeWindows.of(Duration.ofSeconds(5)))
                .reduce(new MyReduceProcessFunction(), new MyProcessWindowFunction2())
                .print();

        // Incremental aggregation with an AggregateFunction
        input
                .keyBy(e -> e)
                .window(TumblingProcessingTimeWindows.of(Duration.ofSeconds(5)))
                .aggregate(new AverageAggregate(), new MyProcessWindowFunction3())
                .print();

        // Using per-window state inside a ProcessWindowFunction
        // (event-time window again, so the same watermark caveat applies)
        input
                .keyBy(e -> e)
                .window(TumblingEventTimeWindows.of(Duration.ofSeconds(5)))
                .process(new ProcessWindowFunction<String, String, String, TimeWindow>() {
                    @Override
                    public void process(String s, ProcessWindowFunction<String, String, String, TimeWindow>.Context context, Iterable<String> iterable, Collector<String> collector) throws Exception {
                        // Access the global (cross-window) keyed state for this key
                        KeyedStateStore globalState = context.globalState();

                        // Access keyed state scoped to the current window only
                        KeyedStateStore windowState = context.windowState();
                        // (Only obtained here for illustration; a usage sketch follows after this pipeline.)
                    }
                });
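        // Added sketch (assumption, not part of the original listing): actually using
        // windowState() via a ValueState counter. The state name "perWindowCount" is
        // illustrative, and the state classes are fully qualified to leave the original
        // import list untouched.
        input
                .keyBy(e -> e)
                .window(TumblingProcessingTimeWindows.of(Duration.ofSeconds(5)))
                .process(new ProcessWindowFunction<String, String, String, TimeWindow>() {
                    @Override
                    public void process(String key, Context context, Iterable<String> elements, Collector<String> out) throws Exception {
                        // Per-window keyed state; Flink clears it when the window is purged
                        org.apache.flink.api.common.state.ValueState<Long> count = context.windowState().getState(
                                new org.apache.flink.api.common.state.ValueStateDescriptor<>("perWindowCount", Long.class));
                        long seen = count.value() == null ? 0L : count.value();
                        for (String ignored : elements) {
                            seen++;
                        }
                        count.update(seen);
                        out.collect(key + " -> " + seen + " element(s) in this window");
                    }
                })
                .print();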

        env.execute();
    }
}

// AggregateFunction<IN, ACC, OUT>; all three type parameters are String in this toy example
class MyAggregateFunction implements AggregateFunction<String, String, String> {

    @Override
    public String createAccumulator() {
        return "createAccumulator->";
    }

    @Override
    public String add(String s1, String s2) {
        return s1 + "-" + s2;
    }

    @Override
    public String getResult(String s) {
        return "res=>" + s;
    }

    @Override
    public String merge(String s1, String acc1) {
        return "merge=>" + s1 + ",=>" + acc1;
    }
}

// ProcessWindowFunction<IN, OUT, KEY, W>: receives all elements of the window at once
class MyProcessWindowFunction extends ProcessWindowFunction<String, String, String, TimeWindow> {

    @Override
    public void process(String s, ProcessWindowFunction<String, String, String, TimeWindow>.Context context, Iterable<String> iterable, Collector<String> collector) throws Exception {
        for (String res : iterable) {
            collector.collect(res);
        }
    }
}

// ReduceFunction used as the incremental (pre-)aggregation step of a window
class MyReduceProcessFunction implements ReduceFunction<String> {

    @Override
    public String reduce(String r1, String r2) {
        return r1 + "-" + r2;
    }
}

// Used together with MyReduceProcessFunction: with incremental aggregation the Iterable
// passed to process() contains exactly one element, the pre-aggregated result
class MyProcessWindowFunction2 extends ProcessWindowFunction<String, Tuple2<Long, String>, String, TimeWindow> {

    @Override
    public void process(String key,
                        Context context,
                        Iterable<String> minReadings,
                        Collector<Tuple2<Long, String>> out) {
        // The single pre-aggregated value for this window
        String min = minReadings.iterator().next();
        // Emit (window start timestamp, reduced value)
        out.collect(new Tuple2<>(context.window().getStart(), min));
    }
}

// Despite the name, this toy AggregateFunction simply concatenates strings
class AverageAggregate implements AggregateFunction<String, String, String> {

    @Override
    public String createAccumulator() {
        return "createAccumulator=>";
    }

    @Override
    public String add(String s1, String s2) {
        return s1 + "-" + s2;
    }

    @Override
    public String getResult(String s) {
        return s;
    }

    @Override
    public String merge(String s, String acc1) {
        return "merge->" + s + "-" + acc1;
    }
}

// Used together with AverageAggregate: again the Iterable holds only the pre-aggregated result
class MyProcessWindowFunction3 extends ProcessWindowFunction<String, Tuple2<String, Double>, String, TimeWindow> {

    @Override
    public void process(String key,
                        Context context,
                        Iterable<String> averages,
                        Collector<Tuple2<String, Double>> out) {
        // The single pre-aggregated result produced by AverageAggregate
        String average = averages.iterator().next();
        // This demo ignores it and simply emits a placeholder value for the key
        out.collect(new Tuple2<>(key, 1.0));
    }
}
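To try the example, start a local socket first (for instance with nc -lk 8888), run the job, and type a few lines: the processing-time pipelines print their reduced or aggregated results for each key every 5 seconds.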