39. Flink Window Function (WindowFunction) Examples

The example below exercises Flink's window functions on a keyed socket stream: ReduceFunction, AggregateFunction, and ProcessWindowFunction, then incremental aggregation combined with a ProcessWindowFunction, and finally per-window keyed state.

java
import org.apache.flink.api.common.functions.AggregateFunction;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.KeyedStateStore;
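// Needed for the per-window ValueState sketch further below
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;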
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;

import java.time.Duration;

public class _06_WindowFunction {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        DataStreamSource<String> input = env.socketTextStream("localhost", 8888);

        // ReduceFunction: incrementally combines two elements of the same type,
        // so the window only ever stores one running value per key.
        input.keyBy(e -> e)
                .window(TumblingProcessingTimeWindows.of(Duration.ofSeconds(5)))
                .reduce(new ReduceFunction<String>() {
                    @Override
                    public String reduce(String v1, String v2) {
                        return v1 + "-" + v2;
                    }
                })
                .print();

        // AggregateFunction: like ReduceFunction, but with a separate accumulator
        // type, so input, intermediate, and output types may all differ.
        input.keyBy(e -> e)
                .window(TumblingProcessingTimeWindows.of(Duration.ofSeconds(5)))
                .aggregate(new MyAggregateFunction())
                .print();

        // ProcessWindowFunction: buffers every element of the window and exposes
        // window metadata through Context. Note: event-time windows only fire if
        // the stream carries watermarks; none are assigned in this demo.
        input
                .keyBy(e -> e)
                .window(TumblingEventTimeWindows.of(Duration.ofSeconds(5)))
                .process(new MyProcessWindowFunction())
                .print();
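
        // A sketch (an assumption, not part of the original) of how watermarks
        // could be attached so the event-time windows here actually fire:
        //   input.assignTimestampsAndWatermarks(
        //           WatermarkStrategy.<String>forBoundedOutOfOrderness(Duration.ofSeconds(2))
        //                   .withTimestampAssigner((element, ts) -> extractEventTime(element)));
        // WatermarkStrategy lives in org.apache.flink.api.common.eventtime, and
        // extractEventTime is a hypothetical helper parsing a timestamp from each line.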

        // Incremental aggregation combined with a ProcessWindowFunction:
        // the ReduceFunction pre-aggregates elements as they arrive, so the
        // ProcessWindowFunction receives a single reduced value plus window metadata.
        input
                .keyBy(e -> e)
                .window(TumblingProcessingTimeWindows.of(Duration.ofSeconds(5)))
                .reduce(new MyReduceProcessFunction(), new MyProcessWindowFunction2())
                .print();

        // The same pattern, with an AggregateFunction doing the incremental part
        input
                .keyBy(e -> e)
                .window(TumblingProcessingTimeWindows.of(Duration.ofSeconds(5)))
                .aggregate(new MyStringAggregate(), new MyProcessWindowFunction3())
                .print();

        // Using per-window state inside a ProcessWindowFunction (these are
        // event-time windows as well, so they also need watermarks to fire)
        input
                .keyBy(e -> e)
                .window(TumblingEventTimeWindows.of(Duration.ofSeconds(5)))
                .process(new ProcessWindowFunction<String, String, String, TimeWindow>() {
                    @Override
                    public void process(String key, Context context, Iterable<String> elements, Collector<String> out) throws Exception {
                        // Keyed state shared globally across all windows of this key
                        KeyedStateStore globalState = context.globalState();

                        // Keyed state scoped to the current window instance only
                        KeyedStateStore windowState = context.windowState();
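
                        // A minimal sketch (assumed usage, not in the original): track how
                        // many times this window has fired for this key, which only exceeds
                        // 1 when a window can fire repeatedly (e.g. late firings with a trigger).
                        ValueState<Long> firings = windowState.getState(
                                new ValueStateDescriptor<>("firing-count", Long.class));
                        long count = (firings.value() == null ? 0L : firings.value()) + 1L;
                        firings.update(count);
                        out.collect(key + " -> firing #" + count + " of this window");
                        // Per-window state should be cleaned up in clear() when the
                        // window is purged, to avoid leaking state.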
                    }
                })
                .print();

        env.execute();
    }
}

class MyAggregateFunction implements AggregateFunction<String, String, String> {

    // Called once per key and window to create the initial accumulator.
    @Override
    public String createAccumulator() {
        return "createAccumulator->";
    }

    // Folds one input element into the accumulator.
    @Override
    public String add(String value, String accumulator) {
        return value + "-" + accumulator;
    }

    // Maps the final accumulator to the window result.
    @Override
    public String getResult(String accumulator) {
        return "res=>" + accumulator;
    }

    // Merges two accumulators; only used by merging window assigners
    // such as session windows.
    @Override
    public String merge(String a, String b) {
        return "merge=>" + a + ",=>" + b;
    }
}

class MyProcessWindowFunction extends ProcessWindowFunction<String, String, String, TimeWindow> {

    // Receives all elements buffered for the window and re-emits each one.
    @Override
    public void process(String key, Context context, Iterable<String> elements, Collector<String> out) throws Exception {
        for (String element : elements) {
            out.collect(element);
        }
    }
}

class MyReduceProcessFunction implements ReduceFunction<String> {

    // Incrementally concatenates elements as they arrive in the window.
    @Override
    public String reduce(String r1, String r2) {
        return r1 + "-" + r2;
    }
}

class MyProcessWindowFunction2 extends ProcessWindowFunction<String, Tuple2<Long, String>, String, TimeWindow> {

    // With incremental aggregation, the Iterable holds exactly one element:
    // the pre-reduced value. Emit it together with the window start timestamp.
    @Override
    public void process(String key,
                        Context context,
                        Iterable<String> reduced,
                        Collector<Tuple2<Long, String>> out) {
        String result = reduced.iterator().next();
        out.collect(new Tuple2<>(context.window().getStart(), result));
    }
}

class MyStringAggregate implements AggregateFunction<String, String, String> {

    @Override
    public String createAccumulator() {
        return "createAccumulator=>";
    }

    @Override
    public String add(String value, String accumulator) {
        return value + "-" + accumulator;
    }

    @Override
    public String getResult(String accumulator) {
        return accumulator;
    }

    @Override
    public String merge(String a, String b) {
        return "merge->" + a + "-" + b;
    }
}

class MyProcessWindowFunction3 extends ProcessWindowFunction<String, Tuple2<String, String>, String, TimeWindow> {

    // The Iterable again holds the single pre-aggregated value; pair it with the key.
    @Override
    public void process(String key,
                        Context context,
                        Iterable<String> aggregated,
                        Collector<Tuple2<String, String>> out) {
        String result = aggregated.iterator().next();
        out.collect(new Tuple2<>(key, result));
    }
}
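
To try this locally, start a socket source first, e.g. with netcat (nc -lk 8888, assuming it is installed), run the job, and type words into the netcat terminal. Every 5 seconds the processing-time examples print their concatenated results per key; the event-time examples stay silent unless watermarks are assigned as sketched above.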