object ExecutePython extends Logging

Linear Supertypes
Logging, AnyRef, Any

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. final val DEFAULT_SPARK_PYTHON_ENV_ARCHIVE_FRAGMENT: String("__kyuubi_spark_python_env__")
  5. final val DEFAULT_SPARK_PYTHON_HOME_ARCHIVE_FRAGMENT: String("__kyuubi_spark_python_home__")
  6. final val IS_PYTHON_APP_KEY: String("spark.yarn.isPython")
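    (A hedged configuration-check sketch using this key appears after this member list.)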
  7. final val PY4J_PATH: String("PY4J_PATH")
  8. final val PY4J_REGEX: Regex
  9. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  10. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  11. def createSessionPythonWorker(spark: SparkSession, session: Session): SessionPythonWorker
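    (A hedged lifecycle sketch using this method appears after this member list.)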
  12. def debug(message: ⇒ Any, t: Throwable): Unit
    Definition Classes
    Logging
  13. def debug(message: ⇒ Any): Unit
    Definition Classes
    Logging
  14. def defaultSparkHome: String
  15. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  16. def equals(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  17. def error(message: ⇒ Any): Unit
    Definition Classes
    Logging
  18. def error(message: ⇒ Any, t: Throwable): Unit
    Definition Classes
    Logging
  19. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  20. def fromJson[T](json: String)(implicit m: Manifest[T]): T
  21. def fromJson[T](json: String, clz: Class[T]): T
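    (A hedged JSON round-trip sketch using these helpers and toJson appears after this member list.)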
  22. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  23. def getSparkPythonExecFromArchive(spark: SparkSession, session: Session): Option[String]
  24. def getSparkPythonHomeFromArchive(spark: SparkSession, session: Session): Option[String]
  25. def hashCode(): Int
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  26. def info(message: ⇒ Any, t: Throwable): Unit
    Definition Classes
    Logging
  27. def info(message: ⇒ Any): Unit
    Definition Classes
    Logging
  28. def init(): Unit
  29. def initializeLoggerIfNecessary(isInterpreter: Boolean): Unit
    Attributes
    protected
    Definition Classes
    Logging
  30. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  31. def logger: Logger
    Attributes
    protected
    Definition Classes
    Logging
  32. def loggerName: String
    Attributes
    protected
    Definition Classes
    Logging
  33. val mapper: ObjectMapper
  34. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  35. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  36. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  37. def startWatcher(process: Process, sessionId: String): Thread
  38. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  39. def toJson[T](obj: T): String
  40. def toString(): String
    Definition Classes
    AnyRef → Any
  41. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  42. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  43. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  44. def warn(message: ⇒ Any, t: Throwable): Unit
    Definition Classes
    Logging
  45. def warn(message: ⇒ Any): Unit
    Definition Classes
    Logging
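
Usage sketch: JSON helpers

A minimal, hedged sketch of the toJson/fromJson members listed above. Only their signatures are taken from this page; the import path and the Payload case class are illustrative assumptions, not part of Kyuubi.

  import org.apache.kyuubi.engine.spark.operation.ExecutePython // path assumed

  object JsonRoundTripSketch {
    // Hypothetical payload type, used only for this example.
    case class Payload(status: String, rows: Int)

    def main(args: Array[String]): Unit = {
      val json: String = ExecutePython.toJson(Payload("ok", 2))
      // fromJson[T] takes an implicit Manifest[T]; the compiler supplies it here.
      val viaManifest: Payload = ExecutePython.fromJson[Payload](json)
      // The Class-based overload is the Java-friendly equivalent.
      val viaClass: Payload = ExecutePython.fromJson(json, classOf[Payload])
      assert(viaManifest == viaClass)
    }
  }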

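Usage sketch: IS_PYTHON_APP_KEY

A hedged sketch that reads the "spark.yarn.isPython" key listed above to check whether the running application was submitted as a Python app. The local SparkSession setup and the import path are assumptions for illustration.

  import org.apache.spark.sql.SparkSession
  import org.apache.kyuubi.engine.spark.operation.ExecutePython // path assumed

  object PythonAppCheckSketch {
    def main(args: Array[String]): Unit = {
      val spark = SparkSession.builder().master("local[1]").appName("sketch").getOrCreate()
      // An absent key is treated here as "not a Python app" (assumption).
      val isPythonApp = spark.conf.getOption(ExecutePython.IS_PYTHON_APP_KEY).contains("true")
      println(s"Submitted as a Python app: $isPythonApp")
      spark.stop()
    }
  }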
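
Usage sketch: session Python worker lifecycle

A heavily hedged, compile-only sketch of how the worker-related members above could fit together. The import paths, the call order, and the meaning of the Option result are assumptions; only the member signatures are taken from this page, and the SessionPythonWorker API itself is not shown here.

  import org.apache.spark.sql.SparkSession
  import org.apache.kyuubi.session.Session // path assumed
  import org.apache.kyuubi.engine.spark.operation.{ExecutePython, SessionPythonWorker} // paths assumed

  object WorkerLifecycleSketch {
    def startWorkerFor(spark: SparkSession, session: Session): SessionPythonWorker = {
      ExecutePython.init() // presumably one-time preparation of Python resources (assumption)
      // Presumably resolves a Python interpreter shipped as a session archive;
      // None would mean "fall back to a default interpreter" (assumption).
      val pythonExec: Option[String] = ExecutePython.getSparkPythonExecFromArchive(spark, session)
      ExecutePython.createSessionPythonWorker(spark, session)
    }
  }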