class SparkSessionBuilder extends Builder[SparkSession]
Configure and build a new SparkSession according to the given usages.
Usage:

```scala
// Auto-configure
val spark: SparkSession = new SparkSessionBuilder("cassandra", "postgres").build().get()

// Build with your own SparkConf
val spark: SparkSession = new SparkSessionBuilder().configure(yourSparkConf).build().get()
```
- Annotations: @Evolving()
- Inheritance: SparkSessionBuilder → Builder → Logging → AnyRef → Any
Instance Constructors
- new SparkSessionBuilder(usages: String*)
  - usages: usages of the SparkSession; can be a list of the following elements (see the sketch below):
    - cassandra
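A minimal sketch of the auto-configured path, combining the constructor usages with the setters documented below; the import of SparkSessionBuilder depends on your project's package and the values are illustrative:

```scala
import org.apache.spark.sql.SparkSession
// plus the import of SparkSessionBuilder from your own project's package

// Build a SparkSession pre-configured for Cassandra access
val spark: SparkSession = new SparkSessionBuilder("cassandra")
  .setAppName("my-app")            // application name
  .setEnv("LOCAL")                 // one of LOCAL, DEV, PREPROD, PROD, EMR
  .setCassandraHost("localhost")   // Cassandra contact point (illustrative)
  .build()
  .get()
```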
Value Members
- final def !=(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- final def ##(): Int
  - Definition Classes: AnyRef → Any
- final def ==(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- def appName: String
  Get Spark application name
- final def asInstanceOf[T0]: T0
  - Definition Classes: Any
- def build(): SparkSessionBuilder.this.type
  Automatically build a SparkSession.
  - Definition Classes: SparkSessionBuilder → Builder
- def cassandraHost: String
  Get the Cassandra host value.
- def clone(): AnyRef
  - Attributes: protected[lang]
  - Definition Classes: AnyRef
  - Annotations: @throws( ... ) @native()
- def configure(conf: SparkConf): SparkSessionBuilder.this.type
  Wrapper of withSparkConf.
  - conf: spark configuration
- final def eq(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- def equals(arg0: Any): Boolean
  - Definition Classes: AnyRef → Any
- def finalize(): Unit
  - Attributes: protected[lang]
  - Definition Classes: AnyRef
  - Annotations: @throws( classOf[java.lang.Throwable] )
- def get(): SparkSession
  Build a SparkSession with the current configuration.
  - returns: the SparkSession
  - Definition Classes: SparkSessionBuilder → Builder
- def get(key: String): String
  Get a SparkConf value (see the sketch below).
  - key: key of the Spark conf
  - returns: the value as a string if the key exists, null otherwise
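A minimal sketch of reading a value back through get; the property name is an ordinary Spark configuration key used purely as illustration:

```scala
// Set a property, then read it back through the builder
val builder = new SparkSessionBuilder()
  .set("spark.executor.memory", "2g")

val mem: String     = builder.get("spark.executor.memory")  // "2g"
val missing: String = builder.get("spark.some.unset.key")   // null, per the doc above
```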
- final def getClass(): Class[_]
  - Definition Classes: AnyRef → Any
  - Annotations: @native()
- def getOrCreate(): SparkSession
  - Definition Classes: Builder
- def getShufflePartitions: String
  Get spark.sql.shuffle.partitions
- def hashCode(): Int
  - Definition Classes: AnyRef → Any
  - Annotations: @native()
- final def isInstanceOf[T0]: Boolean
  - Definition Classes: Any
- def log: Logger
  - Attributes: protected
  - Definition Classes: Logging
- def logDebug(msg: ⇒ String): Unit
  - Attributes: protected
  - Definition Classes: Logging
- def logError(msg: ⇒ String): Unit
  - Attributes: protected
  - Definition Classes: Logging
- def logInfo(msg: ⇒ String): Unit
  - Attributes: protected
  - Definition Classes: Logging
- def logName: String
  - Attributes: protected
  - Definition Classes: Logging
- def logTrace(msg: ⇒ String): Unit
  - Attributes: protected
  - Definition Classes: Logging
- def logWarning(msg: ⇒ String): Unit
  - Attributes: protected
  - Definition Classes: Logging
- final def ne(arg0: AnyRef): Boolean
  - Definition Classes: AnyRef
- final def notify(): Unit
  - Definition Classes: AnyRef
  - Annotations: @native()
- final def notifyAll(): Unit
  - Definition Classes: AnyRef
  - Annotations: @native()
- def registerClass(cls: Class[_]): SparkSessionBuilder.this.type
- def registerClasses(cls: Array[Class[_]]): SparkSessionBuilder.this.type
- def set(options: Map[String, String]): SparkSessionBuilder.this.type
- def set(key: String, value: String): SparkSessionBuilder.this.type
  Set a SparkConf property (see the sketch below).
  - key: key of the Spark conf
  - value: value of the Spark conf
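A minimal sketch of both set overloads; the property names are standard Spark configuration keys chosen purely for illustration:

```scala
// Set properties one by one...
val builder = new SparkSessionBuilder()
  .set("spark.executor.memory", "2g")
  .set("spark.sql.session.timeZone", "UTC")

// ...or pass several at once with the Map overload documented above
builder.set(Map(
  "spark.executor.cores" -> "2",
  "spark.driver.memory"  -> "1g"
))
```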
- def setAppName(name: String): SparkSessionBuilder.this.type
  Set the name of the Spark application.
  - name: name of the application
- def setCassandraHost(host: String): SparkSessionBuilder.this.type
  Set the Cassandra host.
  - host: cassandra host
- def setEnv(env: String): SparkSessionBuilder.this.type
  Set the application environment (see the sketch below).
  - env: LOCAL, DEV, PREPROD, PROD, EMR
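A minimal sketch, assuming the environment string is one of the values listed above:

```scala
// Target a development environment; accepted values per the doc above
// are LOCAL, DEV, PREPROD, PROD and EMR
val builder = new SparkSessionBuilder().setEnv("DEV")
```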
- def setKryoRegistrationRequired(boolean: Boolean): SparkSessionBuilder.this.type
- def setShufflePartitions(par: Int): SparkSessionBuilder.this.type
  Set spark.sql.shuffle.partitions (see the sketch below).
  - par: default number of partitions
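A minimal sketch; the value 50 is purely illustrative:

```scala
// Lower the shuffle parallelism, e.g. for a small local job
val builder = new SparkSessionBuilder().setShufflePartitions(50)

// Read it back; the getter documented above returns the value as a String
val partitions: String = builder.getShufflePartitions  // "50"
```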
- def setSparkMaster(url: String): SparkSessionBuilder.this.type
  Set the master URL for Spark (see the sketch below).
  - url: url of the master
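A minimal sketch; local[*] and spark://host:port are standard Spark master URL formats, shown here with illustrative values:

```scala
// Run locally with as many worker threads as logical cores
val localBuilder = new SparkSessionBuilder().setSparkMaster("local[*]")

// Or point to a standalone cluster (illustrative host and port)
val clusterBuilder = new SparkSessionBuilder().setSparkMaster("spark://master-host:7077")
```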
- def sparkMasterUrl: String
  Get Spark Master URL
- final def synchronized[T0](arg0: ⇒ T0): T0
  - Definition Classes: AnyRef
- def toString(): String
  - Definition Classes: AnyRef → Any
- def useKryo: Boolean
- def useKryo(boo: Boolean): SparkSessionBuilder.this.type
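A minimal sketch combining the Kryo-related members listed on this page (useKryo, setKryoRegistrationRequired, registerClass, registerClasses); the Event case class is purely illustrative:

```scala
// Illustrative class to register with Kryo
case class Event(id: Long, name: String)

val builder = new SparkSessionBuilder()
  .useKryo(true)                      // enable Kryo serialization
  .setKryoRegistrationRequired(true)  // fail fast on unregistered classes
  .registerClass(classOf[Event])      // register a single class
  .registerClasses(Array[Class[_]](classOf[Event], classOf[String]))  // or several at once

val kryoEnabled: Boolean = builder.useKryo  // parameterless getter documented above
```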
- final def wait(): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long, arg1: Int): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... )
- final def wait(arg0: Long): Unit
  - Definition Classes: AnyRef
  - Annotations: @throws( ... ) @native()
- def withSparkConf(conf: SparkConf): SparkSessionBuilder.this.type
  Override the existing configuration with a user-defined configuration (see the sketch below).
  - conf: spark configuration
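A minimal sketch, assuming a hand-built SparkConf; configure (documented above) is a wrapper of this method, so either call works:

```scala
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

// Hand-built configuration (illustrative settings)
val conf = new SparkConf()
  .setAppName("my-app")
  .setMaster("local[*]")
  .set("spark.executor.memory", "2g")

// Override the builder's existing configuration with it
val spark: SparkSession = new SparkSessionBuilder()
  .withSparkConf(conf)
  .build()
  .get()
```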