org.apache.spark.sql.hive

HiveContext

class HiveContext extends SQLContext with Logging

An instance of the Spark SQL execution engine that integrates with data stored in Hive. Configuration for Hive is read from hive-site.xml on the classpath.

Self Type
HiveContext
Annotations
@deprecated
Deprecated

(Since version 2.0.0) Use SparkSession.builder.enableHiveSupport instead
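
A minimal sketch of the recommended replacement, assuming a standalone application; the application name is a placeholder:

    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder()
      .appName("hive-example")   // placeholder name
      .enableHiveSupport()       // reads hive-site.xml from the classpath, as HiveContext does
      .getOrCreate()

    spark.sql("SHOW TABLES").show()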

Linear Supertypes
SQLContext, Serializable, Serializable, Logging, AnyRef, Any

Instance Constructors

  1. new HiveContext(sc: JavaSparkContext)

  2. new HiveContext(sc: SparkContext)
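
A minimal sketch of the legacy construction path from an existing SparkContext, kept here only because this class is deprecated rather than removed; names such as "HiveContext example" and "local[*]" are illustrative:

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql.hive.HiveContext

    val conf = new SparkConf().setAppName("HiveContext example").setMaster("local[*]")
    val sc = new SparkContext(conf)
    val hiveContext = new HiveContext(sc)   // deprecated since 2.0.0; prefer SparkSession

    hiveContext.sql("SHOW DATABASES").show()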

Value Members

  1. final def !=(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  2. final def !=(arg0: Any): Boolean

    Definition Classes
    Any
  3. final def ##(): Int

    Definition Classes
    AnyRef → Any
  4. final def ==(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  5. final def ==(arg0: Any): Boolean

    Definition Classes
    Any
  6. final def asInstanceOf[T0]: T0

    Definition Classes
    Any
  7. def baseRelationToDataFrame(baseRelation: BaseRelation): DataFrame

    Definition Classes
    SQLContext
  8. def cacheTable(tableName: String): Unit

    Definition Classes
    SQLContext
  9. def clearCache(): Unit

    Definition Classes
    SQLContext
  10. def clone(): AnyRef

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  11. def createDataFrame(data: java.util.List[_], beanClass: Class[_]): DataFrame

    Definition Classes
    SQLContext
  12. def createDataFrame(rdd: JavaRDD[_], beanClass: Class[_]): DataFrame

    Definition Classes
    SQLContext
  13. def createDataFrame(rdd: RDD[_], beanClass: Class[_]): DataFrame

    Definition Classes
    SQLContext
  14. def createDataFrame(rows: java.util.List[Row], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @DeveloperApi() @Evolving()
  15. def createDataFrame(rowRDD: JavaRDD[Row], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @DeveloperApi() @Evolving()
  16. def createDataFrame(rowRDD: RDD[Row], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @DeveloperApi() @Evolving()
  17. def createDataFrame[A <: Product](data: Seq[A])(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[A]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental() @Evolving()
  18. def createDataFrame[A <: Product](rdd: RDD[A])(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[A]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental() @Evolving()
  19. def createDataset[T](data: java.util.List[T])(implicit arg0: Encoder[T]): Dataset[T]

    Definition Classes
    SQLContext
    Annotations
    @Experimental() @Evolving()
  20. def createDataset[T](data: RDD[T])(implicit arg0: Encoder[T]): Dataset[T]

    Definition Classes
    SQLContext
    Annotations
    @Experimental()
  21. def createDataset[T](data: Seq[T])(implicit arg0: Encoder[T]): Dataset[T]

    Definition Classes
    SQLContext
    Annotations
    @Experimental() @Evolving()
  22. def dropTempTable(tableName: String): Unit

    Definition Classes
    SQLContext
  23. def emptyDataFrame: DataFrame

    Definition Classes
    SQLContext
  24. final def eq(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  25. def equals(arg0: Any): Boolean

    Definition Classes
    AnyRef → Any
  26. def experimental: ExperimentalMethods

    Definition Classes
    SQLContext
    Annotations
    @Experimental() @transient() @Unstable()
  27. def finalize(): Unit

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  28. def getAllConfs: Map[String, String]

    Definition Classes
    SQLContext
  29. final def getClass(): Class[_]

    Definition Classes
    AnyRef → Any
  30. def getConf(key: String, defaultValue: String): String

    Definition Classes
    SQLContext
  31. def getConf(key: String): String

    Definition Classes
    SQLContext
  32. def hashCode(): Int

    Definition Classes
    AnyRef → Any
  33. def initializeLogIfNecessary(isInterpreter: Boolean): Unit

    Attributes
    protected
    Definition Classes
    Logging
  34. def isCached(tableName: String): Boolean

    Definition Classes
    SQLContext
  35. final def isInstanceOf[T0]: Boolean

    Definition Classes
    Any
  36. def isTraceEnabled(): Boolean

    Attributes
    protected
    Definition Classes
    Logging
  37. def listenerManager: ExecutionListenerManager

    Definition Classes
    SQLContext
    Annotations
    @Experimental() @Evolving()
  38. def log: Logger

    Attributes
    protected
    Definition Classes
    Logging
  39. def logDebug(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  40. def logDebug(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  41. def logError(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  42. def logError(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  43. def logInfo(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  44. def logInfo(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  45. def logName: String

    Attributes
    protected
    Definition Classes
    Logging
  46. def logTrace(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  47. def logTrace(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  48. def logWarning(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  49. def logWarning(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  50. final def ne(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  51. def newSession(): HiveContext

    Returns a new HiveContext as a new session, which will have separate SQLConf, UDF/UDAF registrations, temporary tables and SessionState, but shares the same CacheManager, IsolatedClientLoader and Hive client (for both execution and metadata) with the existing HiveContext (see the example sketch after this member list).

    Definition Classes
    HiveContext → SQLContext
  52. final def notify(): Unit

    Definition Classes
    AnyRef
  53. final def notifyAll(): Unit

    Definition Classes
    AnyRef
  54. def range(start: Long, end: Long, step: Long, numPartitions: Int): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental() @Evolving()
  55. def range(start: Long, end: Long, step: Long): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental() @Evolving()
  56. def range(start: Long, end: Long): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental() @Evolving()
  57. def range(end: Long): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @Experimental() @Evolving()
  58. def read: DataFrameReader

    Definition Classes
    SQLContext
  59. def readStream: DataStreamReader

    Definition Classes
    SQLContext
    Annotations
    @Evolving()
  60. def refreshTable(tableName: String): Unit

    Invalidate and refresh all the cached metadata of the given table. For performance reasons, Spark SQL or the external data source library it uses might cache certain metadata about a table, such as the location of blocks. When those change outside of Spark SQL, users should call this function to invalidate the cache (see the example sketch after this member list).

    Since

    1.3.0

  61. def setConf(key: String, value: String): Unit

    Definition Classes
    SQLContext
  62. def setConf(props: Properties): Unit

    Definition Classes
    SQLContext
  63. def sparkContext: SparkContext

    Definition Classes
    SQLContext
  64. val sparkSession: SparkSession

    Definition Classes
    SQLContext
  65. def sql(sqlText: String): DataFrame

    Definition Classes
    SQLContext
  66. def streams: StreamingQueryManager

    Definition Classes
    SQLContext
  67. final def synchronized[T0](arg0: ⇒ T0): T0

    Definition Classes
    AnyRef
  68. def table(tableName: String): DataFrame

    Definition Classes
    SQLContext
  69. def tableNames(databaseName: String): Array[String]

    Definition Classes
    SQLContext
  70. def tableNames(): Array[String]

    Definition Classes
    SQLContext
  71. def tables(databaseName: String): DataFrame

    Definition Classes
    SQLContext
  72. def tables(): DataFrame

    Definition Classes
    SQLContext
  73. def toString(): String

    Definition Classes
    AnyRef → Any
  74. def udf: UDFRegistration

    Definition Classes
    SQLContext
  75. def uncacheTable(tableName: String): Unit

    Definition Classes
    SQLContext
  76. final def wait(): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  77. final def wait(arg0: Long, arg1: Int): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  78. final def wait(arg0: Long): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
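
The example sketch referenced by newSession and refreshTable above, assuming a hiveContext built as in the constructor sketch; the table and view names are hypothetical:

    import org.apache.spark.sql.hive.HiveContext

    def demo(hiveContext: HiveContext): Unit = {
      // newSession(): shares the CacheManager and Hive clients with the parent context,
      // but keeps its own SQLConf, UDF/UDAF registrations and temporary tables.
      val session1 = hiveContext.newSession()
      val session2 = hiveContext.newSession()

      session1.range(0, 10).createOrReplaceTempView("nums")   // visible only in session1
      session1.sql("SELECT count(*) FROM nums").show()
      // session2.sql("SELECT count(*) FROM nums") would fail: temp views are not shared

      // refreshTable(): drop cached metadata (e.g. block locations) after the table's
      // files were changed outside of Spark SQL.
      hiveContext.refreshTable("my_partitioned_table")
    }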

Deprecated Value Members

  1. def applySchema(rdd: JavaRDD[_], beanClass: Class[_]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.3.0) Use createDataFrame instead.

  2. def applySchema(rdd: RDD[_], beanClass: Class[_]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.3.0) Use createDataFrame instead.

  3. def applySchema(rowRDD: JavaRDD[Row], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.3.0) Use createDataFrame instead.

  4. def applySchema(rowRDD: RDD[Row], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.3.0) Use createDataFrame instead.

  5. def createExternalTable(tableName: String, source: String, schema: StructType, options: java.util.Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 2.2.0) use sparkSession.catalog.createTable instead.

  6. def createExternalTable(tableName: String, source: String, schema: StructType, options: Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 2.2.0) use sparkSession.catalog.createTable instead.

  7. def createExternalTable(tableName: String, source: String, options: java.util.Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 2.2.0) use sparkSession.catalog.createTable instead.

  8. def createExternalTable(tableName: String, source: String, options: Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 2.2.0) use sparkSession.catalog.createTable instead.

  9. def createExternalTable(tableName: String, path: String, source: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 2.2.0) use sparkSession.catalog.createTable instead.

  10. def createExternalTable(tableName: String, path: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 2.2.0) use sparkSession.catalog.createTable instead.

  11. def jdbc(url: String, table: String, theParts: Array[String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.jdbc() instead.

  12. def jdbc(url: String, table: String, columnName: String, lowerBound: Long, upperBound: Long, numPartitions: Int): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.jdbc() instead.

  13. def jdbc(url: String, table: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.jdbc() instead.

  14. def jsonFile(path: String, samplingRatio: Double): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json() instead.

  15. def jsonFile(path: String, schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json() instead.

  16. def jsonFile(path: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json() instead.

  17. def jsonRDD(json: JavaRDD[String], samplingRatio: Double): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json() instead.

  18. def jsonRDD(json: RDD[String], samplingRatio: Double): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json() instead.

  19. def jsonRDD(json: JavaRDD[String], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json() instead.

  20. def jsonRDD(json: RDD[String], schema: StructType): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json() instead.

  21. def jsonRDD(json: JavaRDD[String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json() instead.

  22. def jsonRDD(json: RDD[String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.json() instead.

  23. def load(source: String, schema: StructType, options: java.util.Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.format(source).schema(schema).options(options).load() instead.

  24. def load(source: String, schema: StructType, options: Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.format(source).schema(schema).options(options).load() instead.

  25. def load(source: String, options: java.util.Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.format(source).options(options).load() instead.

  26. def load(source: String, options: Map[String, String]): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.format(source).options(options).load() instead.

  27. def load(path: String, source: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.format(source).load(path) instead.

  28. def load(path: String): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated
    Deprecated

    (Since version 1.4.0) Use read.load(path) instead.

  29. def parquetFile(paths: String*): DataFrame

    Definition Classes
    SQLContext
    Annotations
    @deprecated @varargs()
    Deprecated

    (Since version 1.4.0) Use read.parquet() instead.
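
A hedged migration sketch for the deprecated loaders above, using the DataFrameReader and Catalog replacements the deprecation notes point to; all paths, URLs and table names are placeholders:

    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder().enableHiveSupport().getOrCreate()

    // jsonFile(path) / jsonRDD(...)  ->  read.json(...)
    val jsonDf = spark.read.json("/path/to/people.json")

    // parquetFile(paths*)            ->  read.parquet(...)
    val parquetDf = spark.read.parquet("/path/to/data.parquet")

    // load(source, options)          ->  read.format(source).options(options).load()
    val csvDf = spark.read.format("csv").option("header", "true").load("/path/to/data.csv")

    // jdbc(url, table)               ->  read.jdbc(url, table, properties)
    val jdbcDf = spark.read.jdbc("jdbc:postgresql://host/db", "public.accounts",
      new java.util.Properties())

    // createExternalTable(...)       ->  sparkSession.catalog.createTable(...)
    spark.catalog.createTable("my_table", "/path/to/table/dir")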

Inherited from SQLContext

Inherited from Serializable

Inherited from Serializable

Inherited from Logging

Inherited from AnyRef

Inherited from Any
