Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[SPARK-28716][SQL] Add id to Exchange and Subquery's stringArgs method for easier identifying their reuses in query plans #727

Merged
merged 1 commit into from
Jan 29, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
package org.apache.spark.sql.execution

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}
import java.util.concurrent.atomic.AtomicInteger

import scala.collection.mutable.ArrayBuffer

Expand Down Expand Up @@ -45,6 +46,11 @@ object SparkPlan {

/**
 * Tag carrying the [[LogicalPlan]] a physical node inherited from its ancestor.
 * Stored as a [[TreeNodeTag]] under the key "logical_plan_inherited".
 */
val LOGICAL_PLAN_INHERITED_TAG = TreeNodeTag[LogicalPlan]("logical_plan_inherited")

// Process-wide, monotonically increasing counter backing SparkPlan ids.
private val nextPlanId = new AtomicInteger(0)

/** Registers a new SparkPlan and returns the unique id assigned to it. */
private[execution] def newPlanId(): Int = nextPlanId.getAndAdd(1)
}

/**
Expand All @@ -63,6 +69,8 @@ abstract class SparkPlan extends QueryPlan[SparkPlan] with Logging with Serializ

protected def sparkContext = sqlContext.sparkContext

val id: Int = SparkPlan.newPlanId()

// sqlContext will be null when SparkPlan nodes are created without the active sessions.
val subexpressionEliminationEnabled: Boolean = if (sqlContext != null) {
sqlContext.conf.subexpressionEliminationEnabled
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -717,6 +717,8 @@ case class SubqueryExec(name: String, child: SparkPlan)
/**
 * Blocks until the subquery's asynchronous execution finishes and returns the
 * collected rows. Waits without a timeout (`Duration.Inf`) on `relationFuture`.
 */
override def executeCollect(): Array[InternalRow] = {
  val rows = ThreadUtils.awaitResult(relationFuture, Duration.Inf)
  rows
}

override def stringArgs: Iterator[Any] = super.stringArgs ++ Iterator(s"[id=#$id]")
}

object SubqueryExec {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,8 @@ import org.apache.spark.sql.types.StructType
*/
abstract class Exchange extends UnaryExecNode {
  // An exchange moves rows around without altering the schema, so the output
  // attributes are exactly the child's.
  override def output: Seq[Attribute] = child.output

  // Append this plan's unique id to the standard string arguments so that
  // reused exchanges are distinguishable in textual plan output.
  override def stringArgs: Iterator[Any] = {
    val idTag = s"[id=#$id]"
    super.stringArgs ++ Iterator(idTag)
  }
}

/**
Expand Down