[SPARK-48660][SQL] Fix explain result for CreateTableAsSelect #51013

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Status: Open. Wants to merge 6 commits into master. Showing changes from 4 commits.
@@ -30,6 +30,7 @@ import org.apache.spark.sql.catalyst.trees.{LeafLike, UnaryLike}
import org.apache.spark.sql.connector.ExternalCommandRunner
import org.apache.spark.sql.errors.QueryCompilationErrors
import org.apache.spark.sql.execution.{CommandExecutionMode, ExplainMode, LeafExecNode, SparkPlan, UnaryExecNode}
import org.apache.spark.sql.execution.command.CreateDataSourceTableAsSelectCommand
import org.apache.spark.sql.execution.datasources.DataSource
import org.apache.spark.sql.execution.metric.SQLMetric
import org.apache.spark.sql.execution.streaming.IncrementalExecution
@@ -65,6 +66,37 @@ case class ExecutedCommandExec(cmd: RunnableCommand) extends LeafExecNode {

  override lazy val metrics: Map[String, SQLMetric] = cmd.metrics

  // Cache the optimized command to avoid recomputation
  @transient private lazy val executedQuery: Option[QueryPlan[_]] = {
    cmd match {
      case cmd: CreateDataSourceTableAsSelectCommand =>
        try {
          SparkSession.getActiveSession match {
            case Some(spark) =>
              try {
                val qe = spark.sessionState.executePlan(cmd.query)
                Some(qe.executedPlan)
              } catch {
                case _: Exception => Some(cmd.query)
              }
            case None => Some(cmd.query)
          }
        } catch {
          case _: Exception => Some(cmd.query)
        }
      case _ => None
    }
  }

  // Override to return the optimized query instead of the logical command
  override def innerChildren: Seq[QueryPlan[_]] = {
    cmd match {
      case _: CreateDataSourceTableAsSelectCommand =>
        executedQuery.toSeq
      case _ => cmd :: Nil
    }
  }

  /**
   * A concrete command should override this lazy field to wrap up any side effects caused by the
   * command or any other computation that should be evaluated exactly once. The value of this field
@@ -79,8 +111,6 @@ case class ExecutedCommandExec(cmd: RunnableCommand) extends LeafExecNode {
    cmd.run(session).map(converter(_).asInstanceOf[InternalRow])
  }

-  override def innerChildren: Seq[QueryPlan[_]] = cmd :: Nil

  override def output: Seq[Attribute] = cmd.output

  override def nodeName: String = "Execute " + cmd.nodeName
@@ -23,7 +23,7 @@ import org.apache.spark.internal.LogKeys._
import org.apache.spark.internal.MDC
import org.apache.spark.sql.{AnalysisException, Row, SaveMode, SparkSession}
import org.apache.spark.sql.catalyst.catalog._
-import org.apache.spark.sql.catalyst.plans.logical.{CTEInChildren, CTERelationDef, LogicalPlan, WithCTE}
+import org.apache.spark.sql.catalyst.plans.logical.{CTEInChildren, CTERelationDef, LogicalPlan, Statistics, WithCTE}
import org.apache.spark.sql.catalyst.util.{removeInternalMetadata, CharVarcharUtils}
import org.apache.spark.sql.classic.ClassicConversions.castToImpl
import org.apache.spark.sql.errors.QueryCompilationErrors
@@ -149,6 +149,9 @@ case class CreateDataSourceTableAsSelectCommand(
  assert(query.resolved)
  override def innerChildren: Seq[LogicalPlan] = query :: Nil

  // Override stats to return stats from the inner query
  override def stats: Statistics = query.stats

  override def run(sparkSession: SparkSession): Seq[Row] = {
    assert(table.tableType != CatalogTableType.VIEW)
    assert(table.provider.isDefined)
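As context for the stats delegation above, a quick way to see what query.stats returns from a shell (a hedged sketch; source_table is an illustrative name, not part of this file):

  // Sketch (illustrative table name): `stats` on the command now delegates to
  // the inner query, so EXPLAIN COST prints the query's size estimate instead
  // of a default for the command node.
  val optimized = spark.sql("SELECT * FROM source_table WHERE id > 0")
    .queryExecution.optimizedPlan
  println(optimized.stats.sizeInBytes)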
@@ -300,4 +300,38 @@ class CreateTableAsSelectSuite extends DataSourceTest with SharedSparkSession {
        stop = 57))
    }
  }

test("SPARK-48660: EXPLAIN COST should show statistics") {
Contributor:

== Optimized Logical Plan ==
CreateDataSourceTableAsSelectCommand `spark_catalog`.`default`.`order_history_version_audit_rno`, ErrorIfExists, [eventid, id, referenceid, type, referencetype, sellerid, buyerid, producerid, versionid, changedocuments, hr, dt]
   +- Project [eventid#5, id#6, referenceid#7, type#8, referencetype#9, sellerid#10L, buyerid#11L, producerid#12, versionid#13, changedocuments#14, hr#16, dt#15]
      +- Project [eventid#5, id#6, referenceid#7, type#8, referencetype#9, sellerid#10L, buyerid#11L, producerid#12, versionid#13, changedocuments#14, dt#15, hr#16]
         +- Filter (dt#15 >= 2023-11-29)
            +- SubqueryAlias spark_catalog.default.order_history_version_audit_rno
               +- Relation spark_catalog.default.order_history_version_audit_rno[eventid#5,id#6,referenceid#7,type#8,referencetype#9,sellerid#10L,buyerid#11L,producerid#12,versionid#13,changedocuments#14,dt#15,hr#16] parquet

== Physical Plan ==
Execute CreateDataSourceTableAsSelectCommand
   +- CreateDataSourceTableAsSelectCommand `spark_catalog`.`default`.`order_history_version_audit_rno`, ErrorIfExists, [eventid, id, referenceid, type, referencetype, sellerid, buyerid, producerid, versionid, changedocuments, hr, dt]
         +- Project [eventid#5, id#6, referenceid#7, type#8, referencetype#9, sellerid#10L, buyerid#11L, producerid#12, versionid#13, changedocuments#14, hr#16, dt#15]
            +- Project [eventid#5, id#6, referenceid#7, type#8, referencetype#9, sellerid#10L, buyerid#11L, producerid#12, versionid#13, changedocuments#14, dt#15, hr#16]
               +- Filter (dt#15 >= 2023-11-29)
                  +- SubqueryAlias spark_catalog.default.order_history_version_audit_rno
                     +- Relation spark_catalog.default.order_history_version_audit_rno[eventid#5,id#6,referenceid#7,type#8,referencetype#9,sellerid#10L,buyerid#11L,producerid#12,versionid#13,changedocuments#14,dt#15,hr#16] parquet

From the cases provided by @wangyum, I believe there are two more critical issues here:

  1. The Optimized Logical Plan contains redundant SubqueryAlias nodes.
  2. The Physical Plan contains redundant SubqueryAlias and Relation nodes.

Therefore, I think the assertions in the test case should primarily target these two issues.
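A minimal sketch of such assertions (hypothetical, not from this PR; it assumes the EXPLAIN COST text has been collected into an explainOutput string, as in the test below):

  // Sketch only: split the EXPLAIN COST text at the physical-plan header and
  // check that the redundant nodes are gone from each section. A similar
  // check could target redundant Relation nodes in the physical section.
  val sections = explainOutput.split("== Physical Plan ==")
  assert(!sections(0).contains("SubqueryAlias"),
    s"Optimized plan should not retain SubqueryAlias nodes:\n$explainOutput")
  assert(!sections(1).contains("SubqueryAlias"),
    s"Physical plan should not retain SubqueryAlias nodes:\n$explainOutput")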

In addition, I have printed out the resulting explainOutput:

== Optimized Logical Plan ==
CreateDataSourceTableAsSelectCommand `spark_catalog`.`default`.`target_table`, ErrorIfExists, [id, name]
   +- Project [id#126, name#127], Statistics(sizeInBytes=1.0 B)
      +- Filter (id#126 > 0), Statistics(sizeInBytes=1.0 B)
         +- SubqueryAlias spark_catalog.default.source_table, Statistics(sizeInBytes=1.0 B)
            +- Relation spark_catalog.default.source_table[id#126,name#127] parquet, Statistics(sizeInBytes=0.0 B)

== Physical Plan ==
Execute CreateDataSourceTableAsSelectCommand CreateDataSourceTableAsSelectCommand `spark_catalog`.`default`.`target_table`, ErrorIfExists, [id, name]
   +- *(1) Filter (isnotnull(id#126) AND (id#126 > 0))
      +- *(1) ColumnarToRow
         +- FileScan parquet spark_catalog.default.source_table[id#126,name#127] Batched: true, DataFilters: [isnotnull(id#126), (id#126 > 0)], Format: Parquet, Location: InMemoryFileIndex(1 paths)[file:/Users/yangjie01/SourceCode/git/spark-sbt/sql/core/spark-warehous..., PartitionFilters: [], PushedFilters: [IsNotNull(id), GreaterThan(id,0)], ReadSchema: struct<id:int,name:string>

It seems the second issue I described has been fixed, but SubqueryAlias nodes still exist in the Optimized Logical Plan. Could you take a further look, @yuexing?

@wangyum, is my description accurate? If anything is incorrect, please help me correct it.

Contributor Author:

OK, I see. Let me also fix innerChildren in the Command class, which is the logical plan.
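One possible shape for that change (a hypothetical sketch, not the exact diff that was pushed): let the logical command expose the optimized form of its query, falling back to the analyzed query when no active session is available:

  // Hypothetical sketch: expose the optimized query as the inner child so
  // EXPLAIN does not print the analyzed plan (with SubqueryAlias residue)
  // a second time. Falls back to the analyzed `query` when optimization
  // is not possible here.
  override def innerChildren: Seq[LogicalPlan] = {
    SparkSession.getActiveSession match {
      case Some(spark) =>
        try {
          spark.sessionState.executePlan(query).optimizedPlan :: Nil
        } catch {
          case _: Exception => query :: Nil
        }
      case None => query :: Nil
    }
  }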

Contributor Author:

Now the output is:

== Optimized Logical Plan ==
CreateDataSourceTableAsSelectCommand `spark_catalog`.`default`.`target_table`, ErrorIfExists, Project [id#16, name#17], [id, name]
   +- Project [id#16, name#17]
      +- Filter (id#16 > 0)
         +- Relation spark_catalog.default.source_table[id#16,name#17] parquet, Statistics(sizeInBytes=0.0 B)

== Physical Plan ==
Execute CreateDataSourceTableAsSelectCommand CreateDataSourceTableAsSelectCommand `spark_catalog`.`default`.`target_table`, ErrorIfExists, Project [id#16, name#17], [id, name]
+- *(1) Filter (isnotnull(id#16) AND (id#16 > 0))
   +- *(1) ColumnarToRow
      +- FileScan parquet spark_catalog.default.source_table[id#16,name#17] Batched: true, DataFilters: [isnotnull(id#16), (id#16 > 0)], Format: Parquet, Location: InMemoryFileIndex(1 paths)[file:/Users/yuexing/playground/spark/sql/core/spark-warehouse/org.apac..., PartitionFilters: [], PushedFilters: [IsNotNull(id), GreaterThan(id,0)], ReadSchema: struct<id:int,name:string>

    withTable("source_table") {
      // Create the source table
      sql("""
        CREATE TABLE source_table (
          id INT,
          name STRING
        ) USING PARQUET
      """)

      // Get the explain output for the CTAS statement
      val explainResult = sql("""
        EXPLAIN COST
        CREATE TABLE target_table
        USING PARQUET
        AS SELECT * FROM source_table WHERE id > 0
      """).collect()

      val explainOutput = explainResult.map(_.getString(0)).mkString("\n")

      // The explain output should contain the CTAS command node
      assert(explainOutput.contains("CreateDataSourceTableAsSelectCommand"),
        s"EXPLAIN COST output should contain CreateDataSourceTableAsSelectCommand. Output: $explainOutput")

      // The explain output should contain statistics information
      assert(explainOutput.contains("Statistics"),
        s"EXPLAIN COST output should contain statistics information. Output: $explainOutput")

      // The explain output should contain pushdown information
      assert(explainOutput.contains("PushedFilters"),
        s"EXPLAIN COST output should contain pushdown information. Output: $explainOutput")
    }
  }

}
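For anyone wanting to eyeball the output outside the suite, a minimal spark-shell reproduction (table names are illustrative, not from this PR):

  // Minimal spark-shell sketch (illustrative table names): print the full
  // EXPLAIN COST output for a CTAS over a Parquet source.
  spark.sql("CREATE TABLE source_table (id INT, name STRING) USING PARQUET")
  spark.sql(
    """EXPLAIN COST
      |CREATE TABLE target_table
      |USING PARQUET
      |AS SELECT * FROM source_table WHERE id > 0""".stripMargin)
    .show(truncate = false)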