object KyuubiSparkUtil extends Logging
Linear Supertypes
Logging, AnyRef, Any
Type Members
- type KVIndexParam = org.apache.spark.util.kvstore.KVIndex @scala.annotation.meta.getter
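The KVIndexParam alias mirrors the pattern Spark itself uses: a KVIndex annotation marked with the @getter meta-annotation so that, when placed on a constructor parameter, it lands on the generated getter, which is where Spark's KVStore looks for index fields. A hedged sketch of how such an alias is typically applied (the entity class and field names are illustrative, and the import path of KyuubiSparkUtil is assumed):

```scala
import org.apache.kyuubi.engine.spark.KyuubiSparkUtil.KVIndexParam

// Entities written to Spark's KVStore are indexed by fields whose getters carry
// @KVIndex; the @getter meta in the alias makes this work for constructor params.
private class SessionSummary(
    @KVIndexParam val sessionId: String,
    val user: String)
```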
Value Members
- final def !=(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- final def ##(): Int
  - Definition Classes: AnyRef → Any
- final def ==(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- lazy val SPARK_ENGINE_RUNTIME_VERSION: SemanticVersion
- final val SPARK_SCHEDULER_POOL_KEY: String("spark.scheduler.pool")
- final val SPARK_SQL_EXECUTION_ID_KEY: String("spark.sql.execution.id")
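Both string constants are ordinary Spark property names, so they can be used directly as Spark local-property keys. A minimal sketch under that assumption (setLocalProperty/getLocalProperty are standard Spark API; the import path org.apache.kyuubi.engine.spark.KyuubiSparkUtil and the pool name are illustrative):

```scala
import org.apache.spark.sql.SparkSession
import org.apache.kyuubi.engine.spark.KyuubiSparkUtil

val spark = SparkSession.builder().master("local[1]").getOrCreate()
val sc = spark.sparkContext

// Route jobs submitted from this thread to a named fair-scheduler pool.
sc.setLocalProperty(KyuubiSparkUtil.SPARK_SCHEDULER_POOL_KEY, "kyuubi_pool")

// Read back the SQL execution id Spark attaches to jobs spawned by a SQL query
// (null when the current thread is not inside a SQL execution).
val executionId = sc.getLocalProperty(KyuubiSparkUtil.SPARK_SQL_EXECUTION_ID_KEY)
```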
- final def asInstanceOf[T0]: T0
  - Definition Classes: Any
- def buildURI(uri: URI, fragment: String): URI
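buildURI carries no description; judging only from its signature it presumably returns the given URI with the supplied fragment attached. A hedged sketch of that assumed behaviour (base URI and fragment are illustrative):

```scala
import java.net.URI
import org.apache.kyuubi.engine.spark.KyuubiSparkUtil

// Assumed behaviour: produce a copy of the base URI carrying the given fragment,
// e.g. http://host:4040 -> http://host:4040#some-session-id.
val base = new URI("http://host:4040")
val withFragment: URI = KyuubiSparkUtil.buildURI(base, "some-session-id")
```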
- def clone(): AnyRef
  - Attributes: protected[lang]
  - Definition Classes: AnyRef
  - Annotations: @throws( ... ) @native()
- def debug(message: ⇒ Any, t: Throwable): Unit
  - Definition Classes: Logging
- def debug(message: ⇒ Any): Unit
  - Definition Classes: Logging
- def deployMode: String
- lazy val diagnostics: String
- def engineId: String
- def engineName: String
- def engineUrl: String
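deployMode, engineId, engineName, engineUrl and the lazy diagnostics string all appear to describe the running Spark engine. A small sketch that simply prints them, assuming they are read while a Spark application is active (the import path is assumed):

```scala
import org.apache.spark.sql.SparkSession
import org.apache.kyuubi.engine.spark.KyuubiSparkUtil._

// Ensure a Spark application is running before touching engine metadata.
val spark = SparkSession.builder().master("local[1]").getOrCreate()

// Surface the identity of the current Spark engine; diagnostics is a lazy val,
// so its string is built on first access only.
println(s"deploy mode: $deployMode")
println(s"engine id:   $engineId")
println(s"engine name: $engineName")
println(s"engine url:  $engineUrl")
println(diagnostics)
```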
- final def eq(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- def equals(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- def error(message: ⇒ Any): Unit
  - Definition Classes: Logging
- def error(message: ⇒ Any, t: Throwable): Unit
  - Definition Classes: Logging
- def finalize(): Unit
  - Attributes: protected[lang]
  - Definition Classes: AnyRef
  - Annotations: @throws( classOf[java.lang.Throwable] )
- final def getClass(): Class[_]
  - Definition Classes: AnyRef → Any
  - Annotations: @native()
- def getSessionConf[T](configEntry: ConfigEntry[T], spark: SparkSession): T
  Get the session-level config value.
  - T: the type of the config value
  - configEntry: the Kyuubi config entry to resolve
  - spark: the SparkSession to read the session-level value from
  - returns: the session-level config value; if the Spark session does not set this config, the Kyuubi default is returned
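A minimal usage sketch, assuming KyuubiSparkUtil lives at org.apache.kyuubi.engine.spark and that KyuubiConf.OPERATION_INCREMENTAL_COLLECT is available as a ConfigEntry[Boolean] (any other Kyuubi ConfigEntry would work the same way):

```scala
import org.apache.spark.sql.SparkSession
import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.engine.spark.KyuubiSparkUtil

val spark = SparkSession.builder().master("local[1]").getOrCreate()

// Prefer the value set on this Spark session (e.g. via
// `SET kyuubi.operation.incremental.collect=true`); fall back to the
// Kyuubi default when the session has not set it.
val incrementalCollect: Boolean =
  KyuubiSparkUtil.getSessionConf(KyuubiConf.OPERATION_INCREMENTAL_COLLECT, spark)
```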
- def globalSparkContext: SparkContext
- def hashCode(): Int
  - Definition Classes: AnyRef → Any
  - Annotations: @native()
- def info(message: ⇒ Any, t: Throwable): Unit
  - Definition Classes: Logging
- def info(message: ⇒ Any): Unit
  - Definition Classes: Logging
- def initializeLoggerIfNecessary(isInterpreter: Boolean): Unit
  - Attributes: protected
  - Definition Classes: Logging
- def initializeSparkSession(spark: SparkSession, initializationSQLs: Seq[String]): Unit
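initializeSparkSession takes the session plus a sequence of bootstrap SQL statements, which it presumably executes in order before the session starts serving queries. A hedged sketch with illustrative statements (the import path is assumed):

```scala
import org.apache.spark.sql.SparkSession
import org.apache.kyuubi.engine.spark.KyuubiSparkUtil

val spark = SparkSession.builder().master("local[1]").getOrCreate()

// Run engine bootstrap SQL once, e.g. pick a default database and pin a
// shuffle-partition setting, before handing the session to Kyuubi operations.
KyuubiSparkUtil.initializeSparkSession(
  spark,
  Seq("USE default", "SET spark.sql.shuffle.partitions=8"))
```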
- final def isInstanceOf[T0]: Boolean
  - Definition Classes: Any
- def logger: Logger
  - Attributes: protected
  - Definition Classes: Logging
- def loggerName: String
  - Attributes: protected
  - Definition Classes: Logging
- final def ne(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- final def notify(): Unit
  - Definition Classes: AnyRef
  - Annotations: @native()
- final def notifyAll(): Unit
  - Definition Classes: AnyRef
  - Annotations: @native()
- final def synchronized[T0](arg0: ⇒ T0): T0
  - Definition Classes: AnyRef
- def toString(): String
  - Definition Classes: AnyRef → Any
- final def wait(): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long, arg1: Int): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... ) @native()
- def warn(message: ⇒ Any, t: Throwable): Unit
  - Definition Classes: Logging
- def warn(message: ⇒ Any): Unit
  - Definition Classes: Logging