Flink 入门代码

下面给出几个 Flink 入门示例(Java)。

示例一:本地环境 + Web UI 的流式 WordCount
package com.lyj.sx.flink.wordCount;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;


public class LocalWithWebUI {
    /**
     * Streaming word count on a local Flink environment with the web UI enabled.
     *
     * Reads lines from a socket on host "pxj62", port 8889, splits each line on
     * single spaces into (word, 1) pairs, keys by the word, and prints a running
     * sum per word.
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());
        DataStreamSource<String> lines = env.socketTextStream("pxj62", 8889);

        SingleOutputStreamOperator<Tuple2<String, Integer>> counts = lines
                .flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
                    @Override
                    public void flatMap(String line, Collector<Tuple2<String, Integer>> out) throws Exception {
                        // One (word, 1) pair per whitespace-separated token.
                        String[] words = line.split(" ");
                        for (String word : words) {
                            out.collect(Tuple2.of(word, 1));
                        }
                    }
                })
                .keyBy(new KeySelector<Tuple2<String, Integer>, String>() {
                    @Override
                    public String getKey(Tuple2<String, Integer> pair) throws Exception {
                        return pair.f0; // key on the word itself
                    }
                })
                .sum(1);

        counts.print();
        env.execute("pxj");
    }
}
示例二:使用默认执行环境的流式 WordCount(打印并行度)
package com.lyj.sx.flink.wordCount;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

public class StreamingWordCount {
    /**
     * Streaming word count using the default execution environment.
     *
     * Reads lines from a socket on host "pxj62", port 8881, emits a (word, 1)
     * pair per space-separated token, keys by the word, and prints running sums.
     * Also prints the environment's and the source's parallelism for inspection.
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        int parallelism = env.getParallelism();
        System.out.println("parallelism:" + parallelism);

        DataStreamSource<String> source = env.socketTextStream("pxj62", 8881);
        // Fix: the original printed "source" + value with no separator,
        // producing ambiguous output such as "source1".
        System.out.println("source parallelism:" + source.getParallelism());

        SingleOutputStreamOperator<Tuple2<String, Integer>> summed = source.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
            @Override
            public void flatMap(String s, Collector<Tuple2<String, Integer>> collector) throws Exception {
                // One (word, 1) pair per whitespace-separated token.
                String[] strings = s.split(" ");
                for (String string : strings) {
                    collector.collect(Tuple2.of(string, 1));
                }
            }
        }).keyBy(new KeySelector<Tuple2<String, Integer>, String>() {
            @Override
            public String getKey(Tuple2<String, Integer> s) throws Exception {
                return s.f0; // key on the word
            }
        }).sum(1);

        summed.print();
        env.execute("pxj");
    }
}
示例三:将 FlatMapFunction 抽取为独立静态类的流式 WordCount
package com.lyj.sx.flink.wordCount;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;


public class StreamingWordCountV3 {
    /**
     * Streaming word count with the flat-map logic extracted into a named
     * static class.
     *
     * Reads lines from a socket on host "pxj62", port 8889, emits (word, 1)
     * pairs, keys by the word, and prints running sums.
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<String> source = env.socketTextStream("pxj62", 8889);
        SingleOutputStreamOperator<Tuple2<String, Integer>> data = source.flatMap(new MyFlatMap());
        // Fix: keyBy(int) (tuple-index keyBy) is deprecated and removed in
        // modern Flink; the documented replacement is a KeySelector lambda.
        SingleOutputStreamOperator<Tuple2<String, Integer>> summed = data.keyBy(t -> t.f0).sum(1);
        summed.print();
        env.execute("pxj");
    }

    /** Splits a line on single spaces and emits one (word, 1) pair per token. */
    public static class MyFlatMap implements FlatMapFunction<String, Tuple2<String, Integer>> {

        @Override
        public void flatMap(String s, Collector<Tuple2<String, Integer>> collector) throws Exception {
            for (String string : s.split(" ")) {
                collector.collect(Tuple2.of(string, 1));
            }
        }
    }
}
示例四:读取文本文件并拆分单词
package com.lyj.sx.flink.day02;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

public class ReadTextFileDemo {
    /**
     * Reads "data/a.txt" and prints a (word, 1) pair for EVERY word of every line.
     *
     * Bug fix: the original used map(), which can return only ONE element per
     * input line — the loop kept overwriting a single Tuple2 field, so only the
     * LAST word of each line was ever emitted. flatMap() emits one pair per word.
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStreamSource<String> source = env.readTextFile("data/a.txt");
        source.flatMap(new FlatMapFunction<String, Tuple2<String, Integer>>() {
            @Override
            public void flatMap(String s, Collector<Tuple2<String, Integer>> out) throws Exception {
                // Emit every word on the line, not just the last one.
                String[] strings = s.split(" ");
                for (String string : strings) {
                    out.collect(Tuple2.of(string, 1));
                }
            }
        }).print();
        env.execute("pxj");
    }
}
示例五:自定义非并行 Source
package com.lyj.sx.flink.day02;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;

import java.util.Arrays;
import java.util.List;
import java.util.UUID;

public class CustomNoParSource {
    /**
     * Demonstrates custom non-parallel sources (SourceFunction has parallelism 1).
     *
     * Runs Mysource2 — an endless stream of random UUID strings — on a local
     * environment with the web UI enabled, and prints the results.
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());
        System.out.println("环境执行的并行度:" + env.getParallelism());
        DataStreamSource<String> source = env.addSource(new Mysource2());
        System.out.println("source的并行度为:" + source.getParallelism());
        source.print();
        env.execute();
    }

    /** Finite source: emits a fixed list of strings once, then finishes. */
    private static class Mysource1 implements SourceFunction<String> {
        // Started by the runtime; emits data through the SourceContext.
        @Override
        public void run(SourceContext<String> cx) throws Exception {
            List<String> lists = Arrays.asList("a", "b", "c", "pxj", "sx", "lyj");
            for (String list : lists) {
                cx.collect(list);
            }
        }

        // Nothing to stop: run() terminates on its own once the list is emitted.
        @Override
        public void cancel() {
        }
    }

    /** Infinite source: emits random UUID strings until cancelled. */
    private static class Mysource2 implements SourceFunction<String> {
        // Fix: cancel() is called from a different thread than run(); the flag
        // must be volatile (and need not be a boxed Boolean) or the busy loop
        // may never observe the write and the source may never stop.
        private volatile boolean flag = true;

        @Override
        public void run(SourceContext<String> cx) throws Exception {
            System.out.println("run....");
            while (flag) {
                cx.collect(UUID.randomUUID().toString());
            }
        }

        @Override
        public void cancel() {
            System.out.println("cancel");
            flag = false;
        }
    }
}

作者:pxj_sx(潘陈)

日期:2024-04-11 0:26:20

相关推荐
天远云服15 分钟前
Go 语言高并发实战:批量清洗天远借贷行为验证API (JRZQ8203) 的时间序列数据
大数据·api
Hello.Reader15 分钟前
Flink 系统内置函数(Built-in Functions)分类、典型用法与选型建议
大数据·flink·excel
AI营销实验室43 分钟前
AI 工具何高质量的为销售线索打分?
大数据·人工智能
打码人的日常分享1 小时前
企业数据资产管控和数据治理解决方案
大数据·运维·网络·人工智能·云计算
数字冰雹2 小时前
从“东数西算”到智慧机房:数字孪生如何重塑数据中心的“智能大脑”?
大数据·人工智能·数据可视化
qq_348231852 小时前
市场快评 · 今日复盘要点20251224
大数据
TG:@yunlaoda360 云老大2 小时前
如何在华为云国际站代理商控制台进行SFS Turbo的基础状态核查?
大数据·服务器·华为云·php
Yyyyy123jsjs2 小时前
XAUUSD API 周末无推送,会影响回测与实盘一致性吗?
大数据·区块链
是Judy咋!3 小时前
Elasticsearch---单机部署
大数据·elasticsearch·搜索引擎
天辛大师3 小时前
2026年丙午年火马年周易运势与AI预测大模型启示录
大数据·人工智能·游戏·随机森林·启发式算法