class PySparkInterpreter extends PythonInterpreter
Linear Supertypes
PythonInterpreter, Interpreter, AnyRef, Any
Instance Constructors
- new PySparkInterpreter(_compiler: ScalaCompiler, jepInstance: Jep, jepExecutor: Executor, jepThread: AtomicReference[Thread], jepBlockingService: Blocking, runtime: Runtime[Any], pyApi: PythonAPI, venvPath: Option[Path])
Type Members
- case class PythonState extends State with Product with Serializable
  - Definition Classes: PythonInterpreter
Value Members
- final def !=(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- final def ##(): Int
  - Definition Classes: AnyRef → Any
- final def ==(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- final def asInstanceOf[T0]: T0
  - Definition Classes: Any
- def clone(): AnyRef
  - Attributes: protected[lang]
  - Definition Classes: AnyRef
  - Annotations: @throws( ... ) @native()
- def compile(parsed: PyObject, cell: String): Task[PyObject]
  - Attributes: protected
  - Definition Classes: PythonInterpreter
- val compiler: ScalaCompiler
  - Definition Classes: PythonInterpreter
- def completionsAt(code: String, pos: Int, state: State): Task[List[Completion]]
  - Definition Classes: PythonInterpreter → Interpreter
- def convertFromPython(jep: Jep): PartialFunction[(String, PyObject), (scala.tools.nsc.interactive.Global.Type, Any)]
  - Attributes: protected
  - Definition Classes: PySparkInterpreter → PythonInterpreter
- def convertToPython(jep: Jep): PartialFunction[(String, Any), AnyRef]
  - Attributes: protected
  - Definition Classes: PySparkInterpreter → PythonInterpreter
- def defaultConvertToPython(nv: (String, Any)): AnyRef
  - Attributes: protected
  - Definition Classes: PythonInterpreter
- final def eq(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- def equals(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- def errorCause(get: PyCallable): Option[Throwable]
  - Attributes: protected
  - Definition Classes: PySparkInterpreter → PythonInterpreter
- def eval[T](code: String)(implicit arg0: ClassTag[T]): Task[T]
  - Attributes: protected[python]
  - Definition Classes: PythonInterpreter
- def exec(code: String): Task[Unit]
  - Attributes: protected[python]
  - Definition Classes: PythonInterpreter
- def finalize(): Unit
  - Attributes: protected[lang]
  - Definition Classes: AnyRef
  - Annotations: @throws( classOf[java.lang.Throwable] )
- val gatewayRef: AtomicReference[GatewayServer]
- final def getClass(): Class[_]
  - Definition Classes: AnyRef → Any
  - Annotations: @native()
- def getValue(name: String): Task[PyObject]
  - Attributes: protected[python]
  - Definition Classes: PythonInterpreter
- def handlePyError(get: PyCallable, trace: ArrayList[AnyRef]): Throwable
  - Definition Classes: PythonInterpreter
- def hashCode(): Int
  - Definition Classes: AnyRef → Any
  - Annotations: @native()
- def init(state: State): RIO[InterpreterEnv, State]
  - Definition Classes: PySparkInterpreter → PythonInterpreter → Interpreter
- def injectGlobals(globals: PyObject): RIO[CurrentRuntime, Unit]
  - Attributes: protected
  - Definition Classes: PySparkInterpreter → PythonInterpreter
- final def isInstanceOf[T0]: Boolean
  - Definition Classes: Any
- def jep[T](fn: (Jep) ⇒ T): Task[T]
  - Attributes: protected[python]
  - Definition Classes: PythonInterpreter
- def matplotlib: String
  - Attributes: protected
  - Definition Classes: PythonInterpreter
- final def ne(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- final def notify(): Unit
  - Definition Classes: AnyRef
  - Annotations: @native()
- final def notifyAll(): Unit
  - Definition Classes: AnyRef
  - Annotations: @native()
- def parametersAt(code: String, pos: Int, state: State): Task[Option[Signatures]]
  - Definition Classes: PythonInterpreter → Interpreter
- def parse(code: String, cell: String): Task[PyObject]
  - Attributes: protected
  - Definition Classes: PythonInterpreter
- def populateGlobals(state: State): Task[PyObject]
  - Attributes: protected
  - Definition Classes: PythonInterpreter
- def pysparkImports: String
  Handles setting up PySpark.
  First, we need to pick the Python interpreter. Unfortunately this means we need to re-implement Spark's interpreter configuration logic, because that logic is only implemented inside SparkSubmit (and, in fact, only when you use pyspark-shell).
  Here's the order we follow for the driver Python executable (from org.apache.spark.launcher.SparkSubmitCommandBuilder):
  1. conf spark.pyspark.driver.python
  2. conf spark.pyspark.python
  3. environment variable PYSPARK_DRIVER_PYTHON
  4. environment variable PYSPARK_PYTHON
  For the executors we omit the driver-specific setting, so the order is just:
  1. conf spark.pyspark.python
  2. environment variable PYSPARK_PYTHON
  A sketch of this resolution order follows this entry.
  Additionally, to load pyspark itself we try to grab its location from the Spark distribution, which ensures that all the versions match up.
  WARNING: Using pyspark from pip install pyspark could break things; don't use it!
  - Attributes: protected
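As an illustration of the resolution order above, here is a minimal sketch. It is not the class's actual implementation; the helper names and the plain Map parameters (standing in for the Spark configuration and the process environment) are assumptions made for the example.

  // Hypothetical sketch of the Python executable resolution described above.
  // `conf` stands in for Spark configuration entries, `env` for the process environment.
  def driverPython(conf: Map[String, String], env: Map[String, String]): Option[String] =
    conf.get("spark.pyspark.driver.python")
      .orElse(conf.get("spark.pyspark.python"))
      .orElse(env.get("PYSPARK_DRIVER_PYTHON"))
      .orElse(env.get("PYSPARK_PYTHON"))

  // Executors skip the driver-specific settings.
  def executorPython(conf: Map[String, String], env: Map[String, String]): Option[String] =
    conf.get("spark.pyspark.python")
      .orElse(env.get("PYSPARK_PYTHON"))

For example, if spark.pyspark.python is set in the configuration and PYSPARK_DRIVER_PYTHON is set in the environment, the driver resolves to the configuration value, because configuration entries are checked before environment variables.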
- def run(compiled: PyObject, globals: PyObject, state: State): RIO[CurrentRuntime, State]
  - Attributes: protected
  - Definition Classes: PythonInterpreter
- def run(code: String, state: State): RIO[InterpreterEnv, State]
  - Definition Classes: PythonInterpreter → Interpreter
- def setValue(name: String, value: AnyRef): Task[Unit]
  - Attributes: protected[python]
  - Definition Classes: PythonInterpreter
- def setup: String
  - Attributes: protected
  - Definition Classes: PythonInterpreter
- def shutdown(): Task[Unit]
  - Definition Classes: PythonInterpreter → Interpreter
- final def synchronized[T0](arg0: ⇒ T0): T0
  - Definition Classes: AnyRef
- def toString(): String
  - Definition Classes: AnyRef → Any
- final def wait(): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long, arg1: Int): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... ) @native()