Flink 中 Table、View 与 DataStream 的互相转换

case class outer(f1:String,f2:Inner)

case class outerV1(f1:String,f2:Inner,f3:Int)

case class Inner(f3:String,f4:Int)

测试代码

Scala 代码：
package com.yy.table.convert

import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.table.types.DataType


object streamPOJO2table {
  // Demo POJOs. `outer` nests `Inner`, so the Table API maps f2 to a ROW-typed
  // column. NOTE(review): names would conventionally be UpperCamelCase
  // (Outer, OuterV1) — kept as-is to match the original demo.
  case class outer(f1:String,f2:Inner)
  case class outerV1(f1:String,f2:Inner,f3:Int)
  case class Inner(f3:String,f4:Int)

  def main(args: Array[String]): Unit = {
    // Flink 1.13: initialize the stream processing environment
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Bridge environment for converting between DataStream and Table
    val tEnv = StreamTableEnvironment.create(env)
    import org.apache.flink.streaming.api.scala._
    // A bounded two-element stream of nested case-class values
    val ds1: DataStream[outer] = env.fromElements(
      outer("a",Inner("b",2))
      ,outer("d",Inner("e",4))
    )


    // DataStream -> Table: case-class fields f1/f2 become columns; the nested
    // Inner appears as a structured (ROW-like) column f2
    val table1: Table = tEnv.fromDataStream(ds1)
//    table1
//      .execute()
//      .print()
    /*
    Sample output of table1.execute().print():
    +----+--------------------------------+--------------------------------+
| op |                             f1 |                             f2 |
+----+--------------------------------+--------------------------------+
| +I |                              a |                   (f3=b, f4=2) |
| +I |                              d |                   (f3=e, f4=4) |
+----+--------------------------------+--------------------------------+
     */



//    table1
//      .print()
    /*
    Sample output of table1.print() (DataStream-style, with subtask prefixes):
    5> +I[d, Inner(e,4)]
4> +I[a, Inner(b,2)]
     */


    // Register the DataStream as a temporary view so it can be queried via SQL
    tEnv.createTemporaryView("view1", ds1)

    // Nested-field access in SQL: f2.f4 reads Inner.f4; computed column f3
    val tableResult1: TableResult = tEnv.executeSql("select f1,f2,(f2.f4 + 100) as f3 from view1")
    tableResult1.print()
    /*
    +----+--------------------------------+--------------------------------+-------------+
| op |                             f1 |                             f2 |          f3 |
+----+--------------------------------+--------------------------------+-------------+
| +I |                              a |                   (f3=b, f4=2) |         102 |
| +I |                              d |                   (f3=e, f4=4) |         104 |
+----+--------------------------------+--------------------------------+-------------+
     */


// Same query via sqlQuery, which returns a Table (lazy) instead of a TableResult
    val t1: Table = tEnv.sqlQuery("select f1,f2,(f2.f4 + 100) as f3 from view1")
//    t1.print()

//    println(t1.getResolvedSchema)
    /*
Sample output of t1.print() followed by the resolved schema — note the nested
column f2 keeps the generated case-class type:
+----+--------------------------------+--------------------------------+-------------+
| op |                             f1 |                             f2 |          f3 |
+----+--------------------------------+--------------------------------+-------------+
| +I |                              a |                   (f3=b, f4=2) |         102 |
| +I |                              d |                   (f3=e, f4=4) |         104 |
+----+--------------------------------+--------------------------------+-------------+
2 rows in set
(
  `f1` STRING,
  `f2` *com.yy.table.convert.streamPOJO2table$Inner<`f3` STRING, `f4` INT NOT NULL>* NOT NULL,
  `f3` INT NOT NULL
)
     */

    println("---- 1 -------")
    // Table -> typed DataStream[outerV1]: column names/types (f1, f2, f3) must
    // match the target case class for the structured conversion to succeed
    val o1: DataStream[outerV1] = tEnv.toDataStream[outerV1](t1,classOf[outerV1])
//    o1.print()

    println("---- 2 -------")

    // Flatten the nested row: project Inner's fields as top-level columns
    tEnv.executeSql(
      """
        |select
        |f1
        |,f2.f3
        |,f2.f4
        |from view1
        |""".stripMargin)
//      .print()
    /*
    Sample output (f4 is INT, values come from Inner("b",2) / Inner("e",4);
    the original post showed "c"/"f" here, which was incorrect):
    +----+--------------------------------+--------------------------------+-------------+
| op |                             f1 |                             f3 |          f4 |
+----+--------------------------------+--------------------------------+-------------+
| +I |                              a |                              b |           2 |
| +I |                              d |                              e |           4 |
+----+--------------------------------+--------------------------------+-------------+
     */

    // Re-nest fields with SQL's implicit row constructor (f2.f3, f2.f4) -> ROW
    tEnv.executeSql(
        """
          |select
          |f1
          |,(f2.f3,f2.f4)
          |from view1
          |""".stripMargin)
//      .print()


    env.execute("jobName1")
  }

}
相关推荐
武子康8 小时前
大数据-113 Flink 源算子详解:非并行源(Non-Parallel Source)的原理与应用场景
大数据·后端·flink
Hello.Reader10 小时前
优化 Flink 基于状态的 ETL少 Shuffle、不膨胀、可落地的工程
flink·php·etl
菜鸡儿齐12 小时前
flink api-datastream api-source算子
大数据·flink
lifallen13 小时前
Flink SQL 查询 核心概念与实战指南
大数据·数据库·sql·flink
Hello.Reader2 天前
Flink 架构组件、任务链路、Slot 资源与集群形态
大数据·架构·flink
武子康2 天前
大数据-112 Flink DataStream API :数据源、转换与输出 文件、Socket 到 Kafka 的完整流程
大数据·后端·flink
Hello.Reader2 天前
Flink 有状态流处理State、Keyed State、Checkpoint、对齐/不对齐与生产实践
大数据·flink·linq
菜鸡儿齐2 天前
flink批处理-时间和窗口
大数据·flink
一休哥助手2 天前
Apache Flink:流处理革命的领导者与新一代大数据计算引擎
大数据·flink·apache
Hello.Reader2 天前
Flink 有状态与时间敏感流处理从 Concepts 到 API 实战
大数据·flink