Packages

org.apache.spark.sql.execution

FileSourceScanExec

case class FileSourceScanExec(relation: HadoopFsRelation, stream: Option[SparkDataStream], output: Seq[Attribute], requiredSchema: StructType, partitionFilters: Seq[Expression], optionalBucketSet: Option[BitSet], optionalNumCoalescedBuckets: Option[Int], dataFilters: Seq[Expression], tableIdentifier: Option[TableIdentifier], disableBucketedScan: Boolean = false) extends SparkPlan with FileSourceScanLike with Product with Serializable

Physical plan node for scanning data from HadoopFsRelations.

relation

The file-based relation to scan.

output

Output attributes of the scan, including data attributes and partition attributes.

requiredSchema

Required schema of the underlying relation, excluding partition columns.

partitionFilters

Predicates to use for partition pruning.

optionalBucketSet

Bucket ids for bucket pruning.

optionalNumCoalescedBuckets

Number of coalesced buckets.

dataFilters

Filters on non-partition columns.

tableIdentifier

Identifier for the table in the metastore.

disableBucketedScan

Whether to disable bucketed scan based on the physical query plan; see the rule DisableUnnecessaryBucketedScan for details.
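
Example

A minimal sketch of how this node shows up in practice: reading a Parquet directory yields a FileSourceScanExec leaf in the executed plan. The path and session setup are illustrative; adaptive query execution is disabled here only so the plan tree can be inspected directly with collectFirst.

    import org.apache.spark.sql.SparkSession
    import org.apache.spark.sql.execution.FileSourceScanExec

    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("FileSourceScanExec demo")
      .config("spark.sql.adaptive.enabled", "false") // keep the plan tree inspectable
      .getOrCreate()

    val path = "/tmp/fss-demo" // illustrative path; any Parquet data works
    spark.range(100).toDF("id").write.mode("overwrite").parquet(path)

    // Non-partition predicates such as id > 10 end up in dataFilters.
    val df = spark.read.parquet(path).where("id > 10")

    df.queryExecution.executedPlan.collectFirst { case f: FileSourceScanExec => f }
      .foreach { scan =>
        println(scan.relation)       // the underlying HadoopFsRelation
        println(scan.requiredSchema) // pruned schema, partition columns excluded
        println(scan.dataFilters)    // filters on non-partition columns
      }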

Linear Supertypes
FileSourceScanLike, SessionStateHelper, DataSourceScanExec, StreamSourceAwareSparkPlan, LeafExecNode, LeafLike[SparkPlan], SparkPlan, Serializable, Logging, QueryPlan[SparkPlan], SQLConfHelper, TreeNode[SparkPlan], WithOrigin, TreePatternBits, Product, Equals, AnyRef, Any

Instance Constructors

  1. new FileSourceScanExec(relation: HadoopFsRelation, stream: Option[SparkDataStream], output: Seq[Attribute], requiredSchema: StructType, partitionFilters: Seq[Expression], optionalBucketSet: Option[BitSet], optionalNumCoalescedBuckets: Option[Int], dataFilters: Seq[Expression], tableIdentifier: Option[TableIdentifier], disableBucketedScan: Boolean = false)

    relation

    The file-based relation to scan.

    output

    Output attributes of the scan, including data attributes and partition attributes.

    requiredSchema

    Required schema of the underlying relation, excluding partition columns.

    partitionFilters

    Predicates to use for partition pruning.

    optionalBucketSet

    Bucket ids for bucket pruning.

    optionalNumCoalescedBuckets

    Number of coalesced buckets.

    dataFilters

    Filters on non-partition columns.

    tableIdentifier

    Identifier for the table in the metastore.

    disableBucketedScan

    Whether to disable bucketed scan based on the physical query plan; see the rule DisableUnnecessaryBucketedScan for details.

Type Members

  1. implicit class LogStringContext extends AnyRef
    Definition Classes
    Logging

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##: Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. def MDC(key: LogKey, value: Any): MDC
    Attributes
    protected
    Definition Classes
    Logging
  5. def allAttributes: AttributeSeq
    Definition Classes
    QueryPlan
  6. def apply(number: Int): TreeNode[_]
    Definition Classes
    TreeNode
  7. def argString(maxFields: Int): String
    Definition Classes
    TreeNode
  8. def asCode: String
    Definition Classes
    TreeNode
  9. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  10. lazy val bucketedScan: Boolean
    Definition Classes
    FileSourceScanLike
  11. def canonicalized: SparkPlan
    Definition Classes
    QueryPlan
  12. final def children: Seq[SparkPlan]
    Definition Classes
    LeafLike
  13. def cleanupResources(): Unit

    Cleans up the resources used by the physical operator (if any). In general, all the resources should be cleaned up when the task finishes but operators like SortMergeJoinExec and LimitExec may want eager cleanup to free up tight resources (e.g., memory).

    Attributes
    protected[sql]
    Definition Classes
    SparkPlan
  14. def clone(): SparkPlan
    Definition Classes
    TreeNode → AnyRef
  15. def collect[B](pf: PartialFunction[SparkPlan, B]): Seq[B]
    Definition Classes
    TreeNode
  16. def collectFirst[B](pf: PartialFunction[SparkPlan, B]): Option[B]
    Definition Classes
    TreeNode
  17. def collectFirstWithSubqueries[B](f: PartialFunction[SparkPlan, B]): Option[B]
    Definition Classes
    QueryPlan
  18. def collectLeaves(): Seq[SparkPlan]
    Definition Classes
    TreeNode
  19. def collectWithSubqueries[B](f: PartialFunction[SparkPlan, B]): Seq[B]
    Definition Classes
    QueryPlan
  20. def conf: SQLConf
    Definition Classes
    SparkPlan → SQLConfHelper
  21. final def containsAllPatterns(patterns: TreePattern*): Boolean
    Definition Classes
    TreePatternBits
  22. final def containsAnyPattern(patterns: TreePattern*): Boolean
    Definition Classes
    TreePatternBits
  23. def containsChild: Set[TreeNode[_]]
    Definition Classes
    TreeNode
  24. final def containsPattern(t: TreePattern): Boolean
    Definition Classes
    TreePatternBits
    Annotations
    @inline()
  25. def copyTagsFrom(other: SparkPlan): Unit
    Definition Classes
    TreeNode
  26. val dataFilters: Seq[Expression]
  27. def deterministic: Boolean
    Definition Classes
    QueryPlan
  28. val disableBucketedScan: Boolean
  29. def doCanonicalize(): FileSourceScanExec
    Definition Classes
    FileSourceScanExec → QueryPlan
  30. def doExecute(): RDD[InternalRow]

    Produces the result of the query as an RDD[InternalRow].

    Overridden by concrete implementations of SparkPlan.

    Attributes
    protected
    Definition Classes
    FileSourceScanExec → SparkPlan
  31. def doExecuteBroadcast[T](): Broadcast[T]

    Produces the result of the query as a broadcast variable.

    Overridden by concrete implementations of SparkPlan.

    Attributes
    protected[sql]
    Definition Classes
    SparkPlan
  32. def doExecuteColumnar(): RDD[ColumnarBatch]

    Produces the result of the query as an RDD[ColumnarBatch] if supportsColumnar returns true. By convention the executor that creates a ColumnarBatch is responsible for closing it when it is no longer needed. This allows input formats to reuse batches if needed.

    Attributes
    protected
    Definition Classes
    FileSourceScanExec → SparkPlan
  33. def doExecuteWrite(writeFilesSpec: WriteFilesSpec): RDD[WriterCommitMessage]

    Produces the result of the writes as an RDD[WriterCommitMessage].

    Overridden by concrete implementations of SparkPlan.

    Attributes
    protected
    Definition Classes
    SparkPlan
  34. def doPrepare(): Unit

    Overridden by concrete implementations of SparkPlan. It is guaranteed to run before any execute of SparkPlan. This is helpful if we want to set up some state before executing the query, e.g., BroadcastHashJoin uses it to broadcast asynchronously.

    Attributes
    protected
    Definition Classes
    SparkPlan
    Note

    The prepare method has already walked down the tree, so the implementation doesn't have to call children's prepare methods. It will only be called once, guarded by a lock on this.

  35. lazy val driverMetrics: Map[String, SQLMetric]
    Definition Classes
    FileSourceScanLike
  36. lazy val dynamicallySelectedPartitions: ScanFileListing
    Attributes
    protected
    Definition Classes
    FileSourceScanLike
    Annotations
    @transient()
  37. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  38. final def execute(): RDD[InternalRow]

    Returns the result of this query as an RDD[InternalRow] by delegating to doExecute after preparations.

    Concrete implementations of SparkPlan should override doExecute.

    Definition Classes
    SparkPlan
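    Example

    A minimal sketch, assuming a live SparkSession named spark (as in the class-level example). execute() yields rows in the internal format, so prefer executeCollectPublic or the Dataset API when external Rows are needed.

      import org.apache.spark.sql.Row
      import org.apache.spark.sql.catalyst.InternalRow
      import org.apache.spark.rdd.RDD

      val plan = spark.range(5).queryExecution.executedPlan

      // Prepares the plan, then delegates to doExecute.
      val internalRows: RDD[InternalRow] = plan.execute()
      println(internalRows.count()) // 5

      // Same query, but converted to the external Row format.
      val rows: Array[Row] = plan.executeCollectPublic()
      rows.foreach(println)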
  39. final def executeBroadcast[T](): Broadcast[T]

    Returns the result of this query as a broadcast variable by delegating to doExecuteBroadcast after preparations.

    Concrete implementations of SparkPlan should override doExecuteBroadcast.

    Definition Classes
    SparkPlan
  40. def executeCollect(): Array[InternalRow]

    Runs this query returning the result as an array.

    Definition Classes
    SparkPlan
  41. def executeCollectPublic(): Array[Row]

    Runs this query returning the result as an array, using external Row format.

    Definition Classes
    SparkPlan
  42. final def executeColumnar(): RDD[ColumnarBatch]

    Returns the result of this query as an RDD[ColumnarBatch] by delegating to doExecuteColumnar after preparations.

    Concrete implementations of SparkPlan should override doExecuteColumnar if supportsColumnar returns true.

    Definition Classes
    SparkPlan
  43. final def executeQuery[T](query: => T): T

    Executes a query after preparing the query and adding query plan information to created RDDs for visualization.

    Attributes
    protected
    Definition Classes
    SparkPlan
  44. def executeTail(n: Int): Array[InternalRow]

    Runs this query returning the last n rows as an array.

    This is modeled after RDD.take but never runs any job locally on the driver.

    Definition Classes
    SparkPlan
  45. def executeTake(n: Int): Array[InternalRow]

    Runs this query returning the first n rows as an array.

    This is modeled after RDD.take but never runs any job locally on the driver.

    Definition Classes
    SparkPlan
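    Example

    A short sketch covering executeTake and executeTail, assuming a live SparkSession named spark. Both return rows in the internal format; with a simple range the partition order makes the results predictable.

      val plan = spark.range(1, 1001).queryExecution.executedPlan

      // getLong(0) reads the single id column out of each InternalRow.
      val first3 = plan.executeTake(3).map(_.getLong(0)) // Array(1, 2, 3)
      val last3  = plan.executeTail(3).map(_.getLong(0)) // Array(998, 999, 1000)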
  46. def executeToIterator(): Iterator[InternalRow]

    Runs this query returning the result as an iterator of InternalRow.

    Definition Classes
    SparkPlan
    Note

    Triggers multiple jobs (one for each partition).

  47. def executeWrite(writeFilesSpec: WriteFilesSpec): RDD[WriterCommitMessage]

    Returns the result of the writes as an RDD[WriterCommitMessage] by delegating to doExecuteWrite after preparations.

    Concrete implementations of SparkPlan should override doExecuteWrite.

    Definition Classes
    SparkPlan
  48. def exists(f: (SparkPlan) => Boolean): Boolean
    Definition Classes
    TreeNode
  49. final def expressions: Seq[Expression]
    Definition Classes
    QueryPlan
  50. def fastEquals(other: TreeNode[_]): Boolean
    Definition Classes
    TreeNode
  51. lazy val fileConstantMetadataColumns: Seq[AttributeReference]
    Definition Classes
    FileSourceScanLike
  52. def find(f: (SparkPlan) => Boolean): Option[SparkPlan]
    Definition Classes
    TreeNode
  53. def flatMap[A](f: (SparkPlan) => IterableOnce[A]): Seq[A]
    Definition Classes
    TreeNode
  54. def foreach(f: (SparkPlan) => Unit): Unit
    Definition Classes
    TreeNode
  55. def foreachUp(f: (SparkPlan) => Unit): Unit
    Definition Classes
    TreeNode
  56. def foreachWithSubqueries(f: (SparkPlan) => Unit): Unit
    Definition Classes
    QueryPlan
  57. def formattedNodeName: String
    Attributes
    protected
    Definition Classes
    QueryPlan
  58. def generateTreeString(depth: Int, lastChildren: ArrayList[Boolean], append: (String) => Unit, verbose: Boolean, prefix: String, addSuffix: Boolean, maxFields: Int, printNodeId: Boolean, printOutputColumns: Boolean, indent: Int): Unit
    Definition Classes
    TreeNode
  59. final def getClass(): Class[_ <: AnyRef]
    Definition Classes
    AnyRef → Any
    Annotations
    @IntrinsicCandidate() @native()
  60. def getDefaultTreePatternBits: BitSet
    Attributes
    protected
    Definition Classes
    TreeNode
  61. def getHadoopConf(sparkSession: SparkSession): Configuration
    Definition Classes
    SessionStateHelper
  62. def getHadoopConf(sparkSession: SparkSession, options: Map[String, String]): Configuration
    Definition Classes
    SessionStateHelper
  63. def getSparkConf(sparkSession: SparkSession): SparkConf
    Definition Classes
    SessionStateHelper
  64. def getSqlConf(sparkSession: SparkSession): SQLConf
    Definition Classes
    SessionStateHelper
  65. def getStream: Option[SparkDataStream]

    Get the stream associated with this node.

    Definition Classes
    FileSourceScanExec → StreamSourceAwareSparkPlan
  66. def getTagValue[T](tag: TreeNodeTag[T]): Option[T]
    Definition Classes
    TreeNode
  67. def hashCode(): Int
    Definition Classes
    TreeNode → AnyRef → Any
  68. def height: Int
    Definition Classes
    TreeNode
  69. val id: Int
    Definition Classes
    SparkPlan
  70. def initializeLogIfNecessary(isInterpreter: Boolean, silent: Boolean): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  71. def initializeLogIfNecessary(isInterpreter: Boolean): Unit
    Attributes
    protected
    Definition Classes
    Logging
  72. def innerChildren: Seq[QueryPlan[_]]
    Definition Classes
    QueryPlan → TreeNode
  73. lazy val inputRDD: RDD[InternalRow]
  74. def inputRDDs(): Seq[RDD[InternalRow]]

    The data being read in. This is to provide input to the tests in a way compatible with InputRDDCodegen, which all implementations used to extend.

    Definition Classes
    FileSourceScanExec → DataSourceScanExec
  75. def inputSet: AttributeSet
    Definition Classes
    QueryPlan
  76. def isCanonicalizedPlan: Boolean
    Attributes
    protected
    Definition Classes
    QueryPlan
  77. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  78. def isRuleIneffective(ruleId: RuleId): Boolean
    Attributes
    protected
    Definition Classes
    TreeNode
  79. def isTagsEmpty: Boolean
    Definition Classes
    TreeNode
  80. def isTraceEnabled(): Boolean
    Attributes
    protected
    Definition Classes
    Logging
  81. def jsonFields: List[JField]
    Attributes
    protected
    Definition Classes
    TreeNode
  82. final def legacyWithNewChildren(newChildren: Seq[SparkPlan]): SparkPlan
    Attributes
    protected
    Definition Classes
    TreeNode
  83. def log: Logger
    Attributes
    protected
    Definition Classes
    Logging
  84. def logBasedOnLevel(level: Level)(f: => MessageWithContext): Unit
    Attributes
    protected
    Definition Classes
    Logging
  85. def logDebug(msg: => String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  86. def logDebug(entry: LogEntry, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  87. def logDebug(entry: LogEntry): Unit
    Attributes
    protected
    Definition Classes
    Logging
  88. def logDebug(msg: => String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  89. def logError(msg: => String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  90. def logError(entry: LogEntry, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  91. def logError(entry: LogEntry): Unit
    Attributes
    protected
    Definition Classes
    Logging
  92. def logError(msg: => String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  93. def logInfo(msg: => String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  94. def logInfo(entry: LogEntry, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  95. def logInfo(entry: LogEntry): Unit
    Attributes
    protected
    Definition Classes
    Logging
  96. def logInfo(msg: => String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  97. def logName: String
    Attributes
    protected
    Definition Classes
    Logging
  98. def logTrace(msg: => String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  99. def logTrace(entry: LogEntry, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  100. def logTrace(entry: LogEntry): Unit
    Attributes
    protected
    Definition Classes
    Logging
  101. def logTrace(msg: => String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  102. def logWarning(msg: => String, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  103. def logWarning(entry: LogEntry, throwable: Throwable): Unit
    Attributes
    protected
    Definition Classes
    Logging
  104. def logWarning(entry: LogEntry): Unit
    Attributes
    protected
    Definition Classes
    Logging
  105. def logWarning(msg: => String): Unit
    Attributes
    protected
    Definition Classes
    Logging
  106. def logicalLink: Option[LogicalPlan]

    returns

    The logical plan this plan is linked to.

    Definition Classes
    SparkPlan
  107. def longMetric(name: String): SQLMetric

    returns

    The SQLMetric for the given name.

    Definition Classes
    SparkPlan
  108. def makeCopy(newArgs: Array[AnyRef]): SparkPlan

    The overridden makeCopy also propagates the sqlContext to the copied plan.

    Definition Classes
    SparkPlan → TreeNode
  109. def map[A](f: (SparkPlan) => A): Seq[A]
    Definition Classes
    TreeNode
  110. final def mapChildren(f: (SparkPlan) => SparkPlan): SparkPlan
    Definition Classes
    LeafLike
  111. def mapExpressions(f: (Expression) => Expression): FileSourceScanExec.this.type
    Definition Classes
    QueryPlan
  112. def mapProductIterator[B](f: (Any) => B)(implicit arg0: ClassTag[B]): Array[B]
    Attributes
    protected
    Definition Classes
    TreeNode
  113. def markRuleAsIneffective(ruleId: RuleId): Unit
    Attributes
    protected
    Definition Classes
    TreeNode
  114. val maxMetadataValueLength: Int
    Attributes
    protected
    Definition Classes
    DataSourceScanExec
  115. def mergeTagsFrom(other: SparkPlan): Unit
    Definition Classes
    TreeNode
  116. lazy val metadata: Map[String, String]
  117. def metrics: Map[String, SQLMetric]

    returns

    All metrics of this SparkPlan, keyed by name.

    Definition Classes
    FileSourceScanLike → SparkPlan
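    Example

    A sketch of reading a scan's metrics after execution, assuming the spark session and Parquet path from the class-level example (adaptive execution disabled so the scan node is reachable). The metric names shown are illustrative of what a file scan registers.

      import org.apache.spark.sql.execution.FileSourceScanExec

      val df = spark.read.parquet("/tmp/fss-demo")
      df.collect() // run the query so the metrics get populated

      df.queryExecution.executedPlan.collectFirst { case f: FileSourceScanExec => f }
        .foreach { scan =>
          println(scan.metrics.keys.mkString(", "))       // e.g. numOutputRows, numFiles, ...
          println(scan.longMetric("numOutputRows").value) // rows produced by the scan
        }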
  118. final def missingInput: AttributeSet
    Definition Classes
    QueryPlan
  119. def multiTransformDown(rule: PartialFunction[SparkPlan, Seq[SparkPlan]]): LazyList[SparkPlan]
    Definition Classes
    TreeNode
  120. def multiTransformDownWithPruning(cond: (TreePatternBits) => Boolean, ruleId: RuleId)(rule: PartialFunction[SparkPlan, Seq[SparkPlan]]): LazyList[SparkPlan]
    Definition Classes
    TreeNode
  121. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  122. val nodeName: String
    Definition Classes
    DataSourceScanExec → TreeNode
  123. val nodeNamePrefix: String
  124. val nodePatterns: Seq[TreePattern]
    Attributes
    protected
    Definition Classes
    TreeNode
  125. def nodeWithOutputColumnsString(maxColumns: Int): String
    Definition Classes
    QueryPlan → TreeNode
  126. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @IntrinsicCandidate() @native()
  127. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @IntrinsicCandidate() @native()
  128. def numberedTreeString: String
    Definition Classes
    TreeNode
  129. val optionalBucketSet: Option[BitSet]
  130. val optionalNumCoalescedBuckets: Option[Int]
  131. val origin: Origin
    Definition Classes
    TreeNode → WithOrigin
  132. def otherCopyArgs: Seq[AnyRef]
    Attributes
    protected
    Definition Classes
    TreeNode
  133. val output: Seq[Attribute]
    Definition Classes
    FileSourceScanExec → FileSourceScanLike → QueryPlan
  134. lazy val outputOrdering: Seq[SortOrder]
    Definition Classes
    FileSourceScanLike → QueryPlan
  135. lazy val outputPartitioning: Partitioning

    Specifies how data is partitioned across different nodes in the cluster.

    Specifies how data is partitioned across different nodes in the cluster. Note this method may fail if it is invoked before EnsureRequirements is applied since PartitioningCollection requires all its partitionings to have the same number of partitions.

    Definition Classes
    FileSourceScanLike → SparkPlan
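    Example

    A sketch showing how bucketing surfaces here, assuming the spark session from the class-level example (adaptive execution disabled). The table name and bucket count are illustrative, and whether the bucketed scan is actually used depends on the plan; see DisableUnnecessaryBucketedScan.

      import org.apache.spark.sql.execution.FileSourceScanExec

      spark.range(100).write.bucketBy(4, "id").mode("overwrite").saveAsTable("bucketed_demo")

      // An aggregation on the bucket column can reuse the scan's HashPartitioning
      // and avoid a shuffle.
      val agg = spark.table("bucketed_demo").groupBy("id").count()
      agg.queryExecution.executedPlan.collectFirst { case f: FileSourceScanExec => f }
        .foreach(f => println(f.outputPartitioning)) // e.g. hashpartitioning(id, ..., 4)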
  136. def outputSet: AttributeSet
    Definition Classes
    QueryPlan
  137. def p(number: Int): SparkPlan
    Definition Classes
    TreeNode
  138. val partitionFilters: Seq[Expression]
  139. final def prepare(): Unit

    Prepares this SparkPlan for execution. It's idempotent.

    Definition Classes
    SparkPlan
  140. def prepareSubqueries(): Unit

    Finds scalar subquery expressions in this plan node and starts evaluating them.

    Attributes
    protected
    Definition Classes
    SparkPlan
  141. def prettyJson: String
    Definition Classes
    TreeNode
  142. def printSchema(): Unit
    Definition Classes
    QueryPlan
  143. def producedAttributes: AttributeSet
    Definition Classes
    LeafExecNode → QueryPlan
  144. def productElementNames: Iterator[String]
    Definition Classes
    Product
  145. lazy val pushedDownFilters: Seq[Filter]
    Attributes
    protected
    Definition Classes
    FileSourceScanLike
    Annotations
    @transient()
  146. def redact(text: String): String

    Shorthand for calling redactString() without specifying redacting rules.

    Attributes
    protected
    Definition Classes
    DataSourceScanExec
  147. def references: AttributeSet
    Definition Classes
    QueryPlan
  148. val relation: HadoopFsRelation
  149. def requiredChildDistribution: Seq[Distribution]

    Specifies the data distribution requirements of all the children for this operator. By default it's UnspecifiedDistribution for each child, which means each child can have any distribution.

    If an operator overrides this method and specifies distribution requirements (excluding UnspecifiedDistribution and BroadcastDistribution) for more than one child, Spark guarantees that the outputs of these children will have the same number of partitions, so that the operator can safely zip partitions of these children's result RDDs. Some operators can leverage this guarantee to satisfy interesting requirements, e.g., a non-broadcast join can specify ClusteredDistribution(a, b) for its left child and ClusteredDistribution(c, d) for its right child; it is then guaranteed that the left and right children are co-partitioned by a,b / c,d, meaning tuples with the same values land in partitions with the same index, e.g., (a=1, b=2) and (c=1, d=2) are both in the second partition of the left and right children.

    Definition Classes
    SparkPlan
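    Example

    FileSourceScanExec itself is a leaf, so this contract matters for its parent operators. A sketch of inspecting the requirement on a sort-merge join, assuming the spark session from the class-level example; broadcast joins are disabled here only to force a SortMergeJoinExec, and queryExecution.sparkPlan is used because it is the plan before execution preparations.

      import org.apache.spark.sql.execution.joins.SortMergeJoinExec

      spark.conf.set("spark.sql.autoBroadcastJoinThreshold", -1)
      val l = spark.range(100).toDF("a")
      val r = spark.range(100).toDF("b")
      val joined = l.join(r, l("a") === r("b"))

      joined.queryExecution.sparkPlan.collectFirst { case j: SortMergeJoinExec => j }
        .foreach(j => println(j.requiredChildDistribution)) // a ClusteredDistribution per side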
  150. def requiredChildOrdering: Seq[Seq[SortOrder]]

    Specifies the required sort order, per partition, of the input data for this operator.

    Definition Classes
    SparkPlan
  151. val requiredSchema: StructType
  152. def resetMetrics(): Unit

    Resets all the metrics.

    Definition Classes
    SparkPlan
  153. def rewriteAttrs(attrMap: AttributeMap[Attribute]): SparkPlan
    Definition Classes
    QueryPlan
  154. final def sameResult(other: SparkPlan): Boolean
    Definition Classes
    QueryPlan
  155. def schema: StructType
    Definition Classes
    QueryPlan
  156. def schemaString: String
    Definition Classes
    QueryPlan
  157. lazy val selectedPartitions: ScanFileListing
    Definition Classes
    FileSourceScanLike
    Annotations
    @transient()
  158. final def semanticHash(): Int
    Definition Classes
    QueryPlan
  159. def sendDriverMetrics(): Unit

    Send the driver-side metrics. Before calling this function, selectedPartitions must have been initialized. See SPARK-26327 for more details.

    Attributes
    protected
    Definition Classes
    FileSourceScanLike
  160. final val session: classic.SparkSession
    Definition Classes
    SparkPlan
  161. def sessionState(sparkSession: SparkSession): SessionState
    Attributes
    protected
    Definition Classes
    SessionStateHelper
  162. def setLogicalLink(logicalPlan: LogicalPlan): Unit

    Set logical plan link recursively if unset.

    Definition Classes
    SparkPlan
  163. def setTagValue[T](tag: TreeNodeTag[T], value: T): Unit
    Definition Classes
    TreeNode
  164. def simpleString(maxFields: Int): String
    Definition Classes
    DataSourceScanExec → QueryPlan → TreeNode
  165. def simpleStringWithNodeId(): String
    Definition Classes
    QueryPlan → TreeNode
  166. def sparkContext: SparkContext
    Attributes
    protected
    Definition Classes
    SparkPlan
  167. def statePrefix: String
    Attributes
    protected
    Definition Classes
    QueryPlan
  168. lazy val staticMetrics: Map[String, SQLMetric]

    SQL metrics generated only for scans using dynamic partition pruning.

    Attributes
    protected
    Definition Classes
    FileSourceScanLike
  169. val stream: Option[SparkDataStream]
  170. def stringArgs: Iterator[Any]
    Attributes
    protected
    Definition Classes
    TreeNode
  171. def subqueries: Seq[SparkPlan]
    Definition Classes
    QueryPlan
  172. def subqueriesAll: Seq[SparkPlan]
    Definition Classes
    QueryPlan
  173. lazy val supportsColumnar: Boolean

    Return true if this stage of the plan supports columnar execution. A plan can also support row-based execution (see supportsRowBased). Spark decides which of the two to invoke during query planning.

    Definition Classes
    FileSourceScanExec → SparkPlan
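    Example

    A sketch, assuming the spark session and Parquet path from the class-level example. Parquet scans typically support columnar output for schemas of atomic types.

      import org.apache.spark.sql.execution.FileSourceScanExec

      val plan = spark.read.parquet("/tmp/fss-demo").queryExecution.executedPlan
      plan.collectFirst { case f: FileSourceScanExec => f }.foreach { scan =>
        if (scan.supportsColumnar) {
          // Sum numRows over all ColumnarBatches produced by the scan.
          println(scan.executeColumnar().map(_.numRows()).sum())
        }
      }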
  174. def supportsRowBased: Boolean

    Return true if this stage of the plan supports row-based execution. A plan can also support columnar execution (see supportsColumnar). Spark decides which of the two to invoke during query planning.

    Definition Classes
    SparkPlan
  175. final def synchronized[T0](arg0: => T0): T0
    Definition Classes
    AnyRef
  176. val tableIdentifier: Option[TableIdentifier]
  177. def toJSON: String
    Definition Classes
    TreeNode
  178. def toRowBased: SparkPlan

    Converts the output of this plan to row-based if it is a columnar plan.

    Definition Classes
    SparkPlan
  179. def toString(): String
    Definition Classes
    TreeNode → AnyRef → Any
  180. def transform(rule: PartialFunction[SparkPlan, SparkPlan]): SparkPlan
    Definition Classes
    TreeNode
  181. def transformAllExpressions(rule: PartialFunction[Expression, Expression]): FileSourceScanExec.this.type
    Definition Classes
    QueryPlan
  182. def transformAllExpressionsWithPruning(cond: (TreePatternBits) => Boolean, ruleId: RuleId)(rule: PartialFunction[Expression, Expression]): FileSourceScanExec.this.type
    Definition Classes
    QueryPlan
  183. def transformAllExpressionsWithSubqueries(rule: PartialFunction[Expression, Expression]): FileSourceScanExec.this.type
    Definition Classes
    QueryPlan
  184. def transformDown(rule: PartialFunction[SparkPlan, SparkPlan]): SparkPlan
    Definition Classes
    TreeNode
  185. def transformDownWithPruning(cond: (TreePatternBits) => Boolean, ruleId: RuleId)(rule: PartialFunction[SparkPlan, SparkPlan]): SparkPlan
    Definition Classes
    TreeNode
  186. def transformDownWithSubqueries(f: PartialFunction[SparkPlan, SparkPlan]): SparkPlan
    Definition Classes
    QueryPlan
  187. def transformDownWithSubqueriesAndPruning(cond: (TreePatternBits) => Boolean, ruleId: RuleId)(f: PartialFunction[SparkPlan, SparkPlan]): SparkPlan
    Definition Classes
    QueryPlan
  188. def transformExpressions(rule: PartialFunction[Expression, Expression]): FileSourceScanExec.this.type
    Definition Classes
    QueryPlan
  189. def transformExpressionsDown(rule: PartialFunction[Expression, Expression]): FileSourceScanExec.this.type
    Definition Classes
    QueryPlan
  190. def transformExpressionsDownWithPruning(cond: (TreePatternBits) => Boolean, ruleId: RuleId)(rule: PartialFunction[Expression, Expression]): FileSourceScanExec.this.type
    Definition Classes
    QueryPlan
  191. def transformExpressionsUp(rule: PartialFunction[Expression, Expression]): FileSourceScanExec.this.type
    Definition Classes
    QueryPlan
  192. def transformExpressionsUpWithPruning(cond: (TreePatternBits) => Boolean, ruleId: RuleId)(rule: PartialFunction[Expression, Expression]): FileSourceScanExec.this.type
    Definition Classes
    QueryPlan
  193. def transformExpressionsWithPruning(cond: (TreePatternBits) => Boolean, ruleId: RuleId)(rule: PartialFunction[Expression, Expression]): FileSourceScanExec.this.type
    Definition Classes
    QueryPlan
  194. def transformUp(rule: PartialFunction[SparkPlan, SparkPlan]): SparkPlan
    Definition Classes
    TreeNode
  195. def transformUpWithBeforeAndAfterRuleOnChildren(cond: (SparkPlan) => Boolean, ruleId: RuleId)(rule: PartialFunction[(SparkPlan, SparkPlan), SparkPlan]): SparkPlan
    Definition Classes
    TreeNode
  196. def transformUpWithNewOutput(rule: PartialFunction[SparkPlan, (SparkPlan, Seq[(Attribute, Attribute)])], skipCond: (SparkPlan) => Boolean, canGetOutput: (SparkPlan) => Boolean): SparkPlan
    Definition Classes
    QueryPlan
  197. def transformUpWithPruning(cond: (TreePatternBits) => Boolean, ruleId: RuleId)(rule: PartialFunction[SparkPlan, SparkPlan]): SparkPlan
    Definition Classes
    TreeNode
  198. def transformUpWithSubqueries(f: PartialFunction[SparkPlan, SparkPlan]): SparkPlan
    Definition Classes
    QueryPlan
  199. def transformUpWithSubqueriesAndPruning(cond: (TreePatternBits) => Boolean, ruleId: RuleId)(f: PartialFunction[SparkPlan, SparkPlan]): SparkPlan
    Definition Classes
    QueryPlan
  200. def transformWithPruning(cond: (TreePatternBits) => Boolean, ruleId: RuleId)(rule: PartialFunction[SparkPlan, SparkPlan]): SparkPlan
    Definition Classes
    TreeNode
  201. def transformWithSubqueries(f: PartialFunction[SparkPlan, SparkPlan]): SparkPlan
    Definition Classes
    QueryPlan
  202. def treePatternBits: BitSet
    Definition Classes
    QueryPlan → TreeNode → TreePatternBits
  203. def treeString(append: (String) => Unit, verbose: Boolean, addSuffix: Boolean, maxFields: Int, printOperatorId: Boolean, printOutputColumns: Boolean): Unit
    Definition Classes
    TreeNode
  204. final def treeString(verbose: Boolean, addSuffix: Boolean, maxFields: Int, printOperatorId: Boolean, printOutputColumns: Boolean): String
    Definition Classes
    TreeNode
  205. final def treeString: String
    Definition Classes
    TreeNode
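    Example

    A sketch of the tree-rendering helpers available on any plan node, assuming a live SparkSession named spark:

      val plan = spark.range(10).where("id > 5").queryExecution.executedPlan

      println(plan.treeString)         // full operator tree, one node per line
      println(plan.numberedTreeString) // same tree, with numbers usable via apply(n) / p(n)
      println(plan.simpleString(10))   // one-line summary of this node, at most 10 fields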
  206. def unsetTagValue[T](tag: TreeNodeTag[T]): Unit
    Definition Classes
    TreeNode
  207. def updateOuterReferencesInSubquery(plan: SparkPlan, attrMap: AttributeMap[Attribute]): SparkPlan
    Attributes
    protected
    Definition Classes
    QueryPlan
  208. final def validateNodePatterns(): Unit
    Definition Classes
    QueryPlan → TreeNode
  209. def vectorTypes: Option[Seq[String]]

    The exact java types of the columns that are output in columnar processing mode. This is a performance optimization for code generation and is optional.

    Definition Classes
    FileSourceScanLike → SparkPlan
  210. def verboseString(maxFields: Int): String
    Definition Classes
    QueryPlan → TreeNode
  211. def verboseStringWithOperatorId(): String
    Definition Classes
    FileSourceScanLike → DataSourceScanExec → LeafExecNode → QueryPlan
  212. def verboseStringWithSuffix(maxFields: Int): String
    Definition Classes
    TreeNode
  213. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException])
  214. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException]) @native()
  215. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException])
  216. def waitForSubqueries(): Unit

    Blocks the thread until all subqueries finish evaluation and update the results.

    Attributes
    protected
    Definition Classes
    SparkPlan
  217. def withLogContext(context: Map[String, String])(body: => Unit): Unit
    Attributes
    protected
    Definition Classes
    Logging
  218. final def withNewChildren(newChildren: Seq[SparkPlan]): SparkPlan
    Definition Classes
    TreeNode
  219. def withNewChildrenInternal(newChildren: IndexedSeq[SparkPlan]): SparkPlan
    Definition Classes
    LeafLike
  220. def withSQLConf[T](pairs: (String, String)*)(f: => T): T
    Attributes
    protected
    Definition Classes
    SQLConfHelper

Deprecated Value Members

  1. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.Throwable]) @Deprecated
    Deprecated

    (Since version 9)

Inherited from FileSourceScanLike

Inherited from SessionStateHelper

Inherited from DataSourceScanExec

Inherited from LeafExecNode

Inherited from LeafLike[SparkPlan]

Inherited from SparkPlan

Inherited from Serializable

Inherited from Logging

Inherited from QueryPlan[SparkPlan]

Inherited from SQLConfHelper

Inherited from TreeNode[SparkPlan]

Inherited from WithOrigin

Inherited from TreePatternBits

Inherited from Product

Inherited from Equals

Inherited from AnyRef

Inherited from Any
