Flink 中 Table、View、DataStream 的互转

case class outer(f1:String,f2:Inner)

case class outerV1(f1:String,f2:Inner,f3:Int)

case class Inner(f3:String,f4:Int)

测试代码

Scala
package com.yy.table.convert

import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.table.types.DataType


object streamPOJO2table {
  // Nested case classes used to demonstrate POJO <-> ROW schema mapping.
  // NOTE(review): lowercase class names (outer, outerV1) break the Scala
  // UpperCamelCase convention — kept as-is so the printed schema below
  // (e.g. `...streamPOJO2table$Inner`) still matches the sample output.
  case class outer(f1:String,f2:Inner)
  // outerV1 = outer plus a computed Int field f3, the target of toDataStream.
  case class outerV1(f1:String,f2:Inner,f3:Int)
  case class Inner(f3:String,f4:Int)

  /**
   * Demonstrates DataStream <-> Table conversions (Flink 1.13):
   * fromDataStream, createTemporaryView + SQL over a nested field,
   * and toDataStream back into a wider case class (outerV1).
   */
  def main(args: Array[String]): Unit = {
    // Flink 1.13: initialize the stream-processing environment.
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Bridge a table environment on top of the DataStream environment.
    val tEnv = StreamTableEnvironment.create(env)
    import org.apache.flink.streaming.api.scala._
    // Two-element bounded stream of nested case-class records.
    val ds1: DataStream[outer] = env.fromElements(
      outer("a",Inner("b",2))
      ,outer("d",Inner("e",4))
    )


    // Schema is derived from the case class: f1 STRING, f2 as a nested row.
    val table1: Table = tEnv.fromDataStream(ds1)
//    table1
//      .execute()
//      .print()
    /*
    +----+--------------------------------+--------------------------------+
| op |                             f1 |                             f2 |
+----+--------------------------------+--------------------------------+
| +I |                              a |                   (f3=b, f4=2) |
| +I |                              d |                   (f3=e, f4=4) |
+----+--------------------------------+--------------------------------+
     */



//    table1
//      .print()
    /*
    5> +I[d, Inner(e,4)]
4> +I[a, Inner(b,2)]
     */


    // Register the stream as a SQL view so it can be queried by name.
    tEnv.createTemporaryView("view1", ds1)

    // Nested-field access (f2.f4) plus a computed column aliased as f3.
    val tableResult1: TableResult = tEnv.executeSql("select f1,f2,(f2.f4 + 100) as f3 from view1")
    tableResult1.print()
    /*
    +----+--------------------------------+--------------------------------+-------------+
| op |                             f1 |                             f2 |          f3 |
+----+--------------------------------+--------------------------------+-------------+
| +I |                              a |                   (f3=b, f4=2) |         102 |
| +I |                              d |                   (f3=e, f4=4) |         104 |
+----+--------------------------------+--------------------------------+-------------+
     */


//
    // Same query as a Table object (lazy) instead of an executed TableResult.
    val t1: Table = tEnv.sqlQuery("select f1,f2,(f2.f4 + 100) as f3 from view1")
//    t1.print()

//    println(t1.getResolvedSchema)
    /*
+----+--------------------------------+--------------------------------+-------------+
| op |                             f1 |                             f2 |          f3 |
+----+--------------------------------+--------------------------------+-------------+
| +I |                              a |                   (f3=b, f4=2) |         102 |
| +I |                              d |                   (f3=e, f4=4) |         104 |
+----+--------------------------------+--------------------------------+-------------+
2 rows in set
(
  `f1` STRING,
  `f2` *com.yy.table.convert.streamPOJO2table$Inner<`f3` STRING, `f4` INT NOT NULL>* NOT NULL,
  `f3` INT NOT NULL
)
     */

    println("---- 1 -------")
    // Convert the Table back to a DataStream, mapping rows onto outerV1
    // (its fields f1/f2/f3 line up with the query's output columns).
    val o1: DataStream[outerV1] = tEnv.toDataStream[outerV1](t1,classOf[outerV1])
//    o1.print()

    println("---- 2 -------")

    // Flatten the nested row: project the inner fields as top-level columns.
    tEnv.executeSql(
      """
        |select
        |f1
        |,f2.f3
        |,f2.f4
        |from view1
        |""".stripMargin)
//      .print()
    /*
    NOTE(review): the original post showed f4 as "c"/"f" here, which cannot
    be right — f4 is an Int and the input rows carry 2 and 4. Corrected:
    +----+--------------------------------+--------------------------------+--------------------------------+
| op |                             f1 |                             f3 |                             f4 |
+----+--------------------------------+--------------------------------+--------------------------------+
| +I |                              a |                              b |                              2 |
| +I |                              d |                              e |                              4 |
+----+--------------------------------+--------------------------------+--------------------------------+
     */

    // Implicit row construction: (f2.f3, f2.f4) builds an unnamed nested ROW.
    tEnv.executeSql(
        """
          |select
          |f1
          |,(f2.f3,f2.f4)
          |from view1
          |""".stripMargin)
//      .print()


    // NOTE(review): with every DataStream sink above commented out, execute()
    // may fail with "no operators defined" — confirm before running as-is.
    // (executeSql statements submit their own jobs independently of this call.)
    env.execute("jobName1")
  }

}
相关推荐
岁岁种桃花儿1 天前
Flink CDC从入门到上天系列第一篇:Flink CDC简易应用
大数据·架构·flink
Apache Flink1 天前
Apache Flink Agents 0.2.0 发布公告
大数据·flink·apache
驾数者1 天前
Flink SQL实时数仓实战:基于Flink SQL的完整项目案例
sql·flink·linq
代码匠心2 天前
从零开始学Flink:Flink SQL 极简入门
大数据·flink·flink sql·大数据处理
Apache Flink2 天前
Flink 实时计算 x SLS 存储下推:阿里云 OpenAPI 网关监控平台实践
大数据·阿里云·flink·云计算
yumgpkpm2 天前
华为昇腾300T A2训练、微调Qwen过程,带保姆式命令,麒麟操作系统+鲲鹏CPU
hive·hadoop·华为·flink·spark·kafka·hbase
迎仔3 天前
10-流处理引擎Flink介绍:大数据世界的“实时监控中心”
大数据·flink
ApacheSeaTunnel4 天前
Apache SeaTunnel Zeta、Flink、Spark 怎么选?底层原理 + 实战对比一次讲透
大数据·flink·spark·开源·数据集成·seatunnel·数据同步