39. Flink window function (WindowFunction) examples

java
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.KeyedStateStore;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;

public class _06_WindowFunction {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        DataStreamSource<String> input = env.socketTextStream("localhost", 8888);

        // ReduceFunction
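        // A ReduceFunction incrementally combines two elements of the same type into one,
        // so only a single reduced value per key and window is kept as window state.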
        input.keyBy(e -> e)
                .window(TumblingProcessingTimeWindows.of(Duration.ofSeconds(5)))
                .reduce(new ReduceFunction<String>() {
                    @Override
                    public String reduce(String v1, String v2) {
                        return v1 + "-" + v2;
                    }
                })
                .print();

        // AggregateFunction
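        // An AggregateFunction generalizes ReduceFunction: createAccumulator() initializes the
        // per-window accumulator, add() folds each element into it, getResult() produces the output
        // when the window fires, and merge() combines accumulators (needed for merging windows).
        // Input, accumulator and output types may all differ; in this toy example they are all String.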
        input.keyBy(e -> e)
                .window(TumblingProcessingTimeWindows.of(Duration.ofSeconds(5)))
                .aggregate(new MyAggregateFunction());

        // ProcessWindowFunction
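        // A ProcessWindowFunction receives an Iterable over all elements of the window plus a
        // Context with window metadata, at the cost of buffering every element until the window fires.
        // This pipeline uses event-time windows, which only fire once timestamps and watermarks
        // have been assigned (see the sketch after the listing).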
        input
                .keyBy(e -> e)
                .window(TumblingEventTimeWindows.of(Duration.ofSeconds(5)))
                .process(new MyProcessWindowFunction());

        // ProcessWindowFunction with incremental aggregation
        // Incremental aggregation with a ReduceFunction
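        // When an incremental function is combined with a ProcessWindowFunction, elements are
        // aggregated as they arrive and the ProcessWindowFunction's Iterable contains only the
        // single pre-aggregated value plus the window metadata. The same applies to the
        // aggregate() variant below.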
        input
                .keyBy(e -> e)
                .window(TumblingProcessingTimeWindows.of(Duration.ofSeconds(5)))
                .reduce(new MyReduceProcessFunction(), new MyProcessWindowFunction2());

        // Incremental aggregation with an AggregateFunction
        input
                .keyBy(e -> e)
                .window(TumblingProcessingTimeWindows.of(Duration.ofSeconds(5)))
                .aggregate(new AverageAggregate(), new MyProcessWindowFunction3());

        // Using per-window state in a ProcessWindowFunction
        input
                .keyBy(e -> e)
                .window(TumblingEventTimeWindows.of(Duration.ofSeconds(5)))
                .process(new ProcessWindowFunction<String, String, String, TimeWindow>() {
                    @Override
                    public void process(String s, ProcessWindowFunction<String, String, String, TimeWindow>.Context context, Iterable<String> iterable, Collector<String> collector) throws Exception {
                        // Access keyed state that is global to this key (shared across all windows)
                        KeyedStateStore globalState = context.globalState();

                        // Access keyed state scoped to the current window only
                        KeyedStateStore windowState = context.windowState();
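
                        // Note: per-window state must be cleaned up in the function's clear()
                        // method once the window is purged, otherwise it lingers as leaked state.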
                    }
                });

        env.execute();
    }
}

class MyAggregateFunction implements AggregateFunction<String, String, String> {

    @Override
    public String createAccumulator() {
        return "createAccumulator->";
    }

    @Override
    public String add(String s1, String s2) {
        return s1 + "-" + s2;
    }

    @Override
    public String getResult(String s) {
        return "res=>" + s;
    }

    @Override
    public String merge(String s1, String acc1) {
        return "merge=>" + s1 + ",=>" + acc1;
    }
}

class MyProcessWindowFunction extends ProcessWindowFunction<String, String, String, TimeWindow> {

    @Override
    public void process(String s, ProcessWindowFunction<String, String, String, TimeWindow>.Context context, Iterable<String> iterable, Collector<String> collector) throws Exception {
        for (String res : iterable) {
            collector.collect(res);
        }
    }
}

class MyReduceProcessFunction implements ReduceFunction<String> {

    @Override
    public String reduce(String r1, String r2) {
        return r1 + "-" + r2;
    }
}

class MyProcessWindowFunction2 extends ProcessWindowFunction<String, Tuple2<Long, String>, String, TimeWindow> {

    @Override
    public void process(String key,
                        Context context,
                        Iterable<String> elements,
                        Collector<Tuple2<Long, String>> out) {
        // The Iterable holds exactly one element: the value pre-aggregated by MyReduceProcessFunction
        String reduced = elements.iterator().next();
        out.collect(new Tuple2<>(context.window().getStart(), reduced));
    }
}

class AverageAggregate implements AggregateFunction<String, String, String> {

    @Override
    public String createAccumulator() {
        return "createAccumulator=>";
    }

    @Override
    public String add(String s1, String s2) {
        return s1 + "-" + s2;
    }

    @Override
    public String getResult(String s) {
        return s;
    }

    @Override
    public String merge(String s, String acc1) {
        return "merge->" + s + "-" + acc1;
    }
}

class MyProcessWindowFunction3 extends ProcessWindowFunction<String, Tuple2<String, Double>, String, TimeWindow> {

    @Override
    public void process(String key,
                        Context context,
                        Iterable<String> elements,
                        Collector<Tuple2<String, Double>> out) {
        // The Iterable holds the single value pre-aggregated by AverageAggregate; since the toy
        // accumulator is just a concatenated String, a placeholder 1.0 is emitted as the "average"
        String preAggregated = elements.iterator().next();
        out.collect(new Tuple2<>(key, 1.0));
    }
}
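
Note: the two pipelines above that use TumblingEventTimeWindows never fire as written, because socketTextStream assigns no record timestamps or watermarks. A minimal sketch of how they could be made to fire, assuming each socket line has the hypothetical form word,epochMillis (this input format is an assumption, not part of the original example):

java
// Additional imports needed: org.apache.flink.api.common.eventtime.WatermarkStrategy
// and org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator
SingleOutputStreamOperator<String> withTimestamps = input.assignTimestampsAndWatermarks(
        WatermarkStrategy.<String>forBoundedOutOfOrderness(Duration.ofSeconds(2))
                .withTimestampAssigner((line, previousTimestamp) -> Long.parseLong(line.split(",")[1])));

// The event-time pipelines would then be built on withTimestamps instead of input, e.g.:
withTimestamps
        .keyBy(line -> line.split(",")[0])
        .window(TumblingEventTimeWindows.of(Duration.ofSeconds(5)))
        .process(new MyProcessWindowFunction())
        .print();

For a quick local test, the socket source can be fed with nc -lk 8888 in another terminal; every line typed there becomes one stream element, and the processing-time pipelines print their results after each 5-second window closes.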