org.apache.spark.sql.catalyst.trees

DataFrameQueryContext

case class DataFrameQueryContext(stackTrace: Seq[StackTraceElement], pysparkErrorContext: Option[(String, String)]) extends QueryContext with Product with Serializable

Linear Supertypes
Serializable, Product, Equals, QueryContext, AnyRef, Any

Instance Constructors

  1. new DataFrameQueryContext(stackTrace: Seq[StackTraceElement], pysparkErrorContext: Option[(String, String)])
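
A minimal construction sketch, assuming this internal Catalyst class is reachable on your classpath. In practice Spark records the stack trace itself when a DataFrame operation is defined, so building the context by hand is purely illustrative; the local names below are not part of the API.

  import org.apache.spark.sql.catalyst.trees.DataFrameQueryContext

  // Capture the current JVM stack trace to stand in for the DataFrame call site.
  // Spark populates this internally when the operation is recorded.
  val stack: Seq[StackTraceElement] = Thread.currentThread().getStackTrace.toSeq

  // No PySpark (fragment, call site) pair for a pure JVM call site.
  val ctx = DataFrameQueryContext(stack, pysparkErrorContext = None)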

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##: Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  5. val callSite: String
    Definition Classes
    DataFrameQueryContext → QueryContext
  6. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.CloneNotSupportedException]) @IntrinsicCandidate() @native()
  7. val contextType: QueryContextType
  8. val displayedCallsite: String
  9. val displayedFragment: String
  10. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  11. val fragment: String
    Definition Classes
    DataFrameQueryContext → QueryContext
  12. final def getClass(): Class[_ <: AnyRef]
    Definition Classes
    AnyRef → Any
    Annotations
    @IntrinsicCandidate() @native()
  13. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  14. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  15. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @IntrinsicCandidate() @native()
  16. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @IntrinsicCandidate() @native()
  17. def objectName(): String
    Definition Classes
    DataFrameQueryContext → QueryContext
  18. def objectType(): String
    Definition Classes
    DataFrameQueryContext → QueryContext
  19. def productElementNames: Iterator[String]
    Definition Classes
    Product
  20. val pysparkCallSite: String
  21. val pysparkErrorContext: Option[(String, String)]
  22. val pysparkFragment: String
  23. val stackTrace: Seq[StackTraceElement]
  24. def startIndex(): Int
    Definition Classes
    DataFrameQueryContext → QueryContext
  25. def stopIndex(): Int
    Definition Classes
    DataFrameQueryContext → QueryContext
  26. lazy val summary: String
    Definition Classes
    DataFrameQueryContext → QueryContext
  27. final def synchronized[T0](arg0: => T0): T0
    Definition Classes
    AnyRef
  28. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException])
  29. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException]) @native()
  30. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException])
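
The QueryContext members above (contextType, fragment, callSite, summary) are what Spark's error framework reports for a failing DataFrame operation; errors implementing SparkThrowable typically expose them via getQueryContext(). The sketch below assumes the inherited QueryContext is the public org.apache.spark.QueryContext interface; the describe helper is hypothetical and uses only members documented on this page.

  import org.apache.spark.QueryContext

  // Hypothetical logging helper. For a DataFrame context, `fragment` typically
  // names the DataFrame API call and `callSite` points at the user code that
  // issued it; `summary` carries the full preformatted message.
  def describe(ctx: QueryContext): String =
    s"[${ctx.contextType}] ${ctx.fragment} at ${ctx.callSite}\n${ctx.summary}"

Note that for a DataFrameQueryContext the SQL-oriented members (objectType, objectName, startIndex, stopIndex) may not carry meaningful values, so the helper above deliberately avoids them.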

Deprecated Value Members

  1. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.Throwable]) @Deprecated
    Deprecated

(Since Java 9)
