Spark: SparkPlanInfo

This note traces how Spark builds a SparkPlanInfo, the immutable snapshot of a physical plan that the SQL UI and event log consume, and where each node's one-line description comes from.

src/main/scala/org/apache/spark/sql/execution/SparkPlanInfo.scala

fromSparkPlan converts the plan tree recursively; each node's description is its simpleString, truncated to SQLConf.get.maxToStringFields fields (spark.sql.debug.maxToStringFields, default 25):

```scala
    new SparkPlanInfo(
      plan.nodeName,
      plan.simpleString(SQLConf.get.maxToStringFields),
      children.map(fromSparkPlan),
      metadata,
      metrics)
```
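The same description string can be reproduced with public methods; a minimal spark-shell sketch (fromSparkPlan itself is internal API, and the DataFrame here is made up for illustration):

```scala
import org.apache.spark.sql.internal.SQLConf

// Assumes a live SparkSession named `spark`, as in spark-shell.
val df = spark.range(10).selectExpr("id", "id * 2 AS doubled")
val plan = df.queryExecution.executedPlan

println(plan.nodeName)                                     // root node name, e.g. "AdaptiveSparkPlan"
println(plan.simpleString(SQLConf.get.maxToStringFields))  // the one-line description shown above
```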

src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala

QueryPlan only prepends a state prefix to TreeNode's version:

```scala
  override def simpleString(maxFields: Int): String = statePrefix + super.simpleString(maxFields)
```
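For context, that prefix comes from statePrefix, defined nearby in QueryPlan and overridden in LogicalPlan; roughly (quoted from memory of recent Spark sources, verify against your checkout):

```scala
  // QueryPlan: "!" flags an invalid plan, i.e. one referencing missing input.
  protected def statePrefix =
    if (missingInput.nonEmpty && children.nonEmpty) "!" else ""

  // LogicalPlan override: "'" flags a plan that is not yet resolved.
  override protected def statePrefix =
    if (!resolved) "'" else super.statePrefix
```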

src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala

TreeNode.simpleString is just the node name followed by the (truncated) argument list:

```scala
  /**
   * ONE line description of this node.
   * @param maxFields Maximum number of fields that will be converted to strings.
   *                  Any elements beyond the limit will be dropped.
   */
  def simpleString(maxFields: Int): String = s"$nodeName ${argString(maxFields)}".trim
```
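The maxFields cap is easiest to see on a wide projection; a hedged sketch (spark-shell assumed, exact formatting varies across Spark versions):

```scala
// Thirty made-up columns c1..c30 derived from `id`.
val wide = spark.range(1).selectExpr((1 to 30).map(i => s"id AS c$i"): _*)
val plan = wide.queryExecution.optimizedPlan

println(plan.simpleString(5))   // roughly: Project [id#0L AS c1#1L, ..., ... 26 more fields]
println(plan.simpleString(35))  // all thirty aliases spelled out
```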
argString walks stringArgs and keeps only the arguments that are not children (children are already rendered as the tree structure), dropping empty collections and redacting sensitive entries in option and property maps:

```scala
  /** Returns a string representing the arguments to this node, minus any children */
  def argString(maxFields: Int): String = stringArgs.flatMap {
    case tn: TreeNode[_] if allChildren.contains(tn) => Nil
    case Some(tn: TreeNode[_]) if allChildren.contains(tn) => Nil
    case Some(tn: TreeNode[_]) => tn.simpleString(maxFields) :: Nil
    case tn: TreeNode[_] => tn.simpleString(maxFields) :: Nil
    case seq: Seq[Any] if seq.toSet.subsetOf(allChildren.asInstanceOf[Set[Any]]) => Nil
    case iter: Iterable[_] if iter.isEmpty => Nil
    case array: Array[_] if array.isEmpty => Nil
    case xs @ (_: Seq[_] | _: Set[_] | _: Array[_]) =>
      formatArg(xs, maxFields) :: Nil
    case null => Nil
    case None => Nil
    case Some(null) => Nil
    case Some(table: CatalogTable) =>
      stringArgsForCatalogTable(table)
    case Some(any) => any :: Nil
    case map: CaseInsensitiveStringMap =>
      redactMapString(map.asCaseSensitiveMap().asScala, maxFields)
    case map: Map[_, _] =>
      redactMapString(map, maxFields)
    case t: TableSpec =>
      t.copy(properties = Utils.redact(t.properties).toMap,
        options = Utils.redact(t.options).toMap) :: Nil
    case table: CatalogTable =>
      stringArgsForCatalogTable(table)
    case other => other :: Nil
  }.mkString(", ")
```

src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala

Finally, Alias.toString explains the "child AS name#id" form seen in the argument lists: it appends the unique expression ID plus a type suffix (the trailing "L" on long-typed columns):

```scala
  override def toString: String = s"$child AS $name#${exprId.id}$typeSuffix$delaySuffix"
```
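That toString is what produces strings like id#0L AS doubled#2L in the plan output; a hand-built example (expression IDs come from a global counter, so the numbers will differ):

```scala
import org.apache.spark.sql.catalyst.expressions.{Alias, AttributeReference}
import org.apache.spark.sql.types.LongType

val id = AttributeReference("id", LongType)()   // a resolved long attribute
println(Alias(id, "doubled")().toString)        // e.g. "id#0L AS doubled#1L"
```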