class SparkRepositoryBuilder[DataType] extends Builder[SparkRepository[DataType]]
The SparkRepositoryBuilder builds a SparkRepository configured for the given DataType and Storage.
- DataType
the type of the data handled by the repository
- Annotations
- @Evolving()
Linear Supertypes
Ordering
- Alphabetic
- By Inheritance
Inherited
- SparkRepositoryBuilder
- Builder
- Logging
- AnyRef
- Any
- Hide All
- Show All
Visibility
- Public
- All
Instance Constructors
- new SparkRepositoryBuilder(config: Config)(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[DataType])
- new SparkRepositoryBuilder(storage: Storage)(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[DataType])
- new SparkRepositoryBuilder()(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[DataType])
-
new
SparkRepositoryBuilder(storage: Option[Storage], config: Option[Config])(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[DataType])
- storage
type of storage
- config
a com.typesafe.config.Config object
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
def
build(): SparkRepositoryBuilder.this.type
Build the SparkRepository from the current configuration; returns this builder so that calls can be chained.
- Definition Classes
- SparkRepositoryBuilder → Builder
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
- var config: Option[Config]
-
def
createConnector(): Connector
Create the connector according to the configured storage type.
- returns
the Connector instance matching the storage type
- Attributes
- protected[this]
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] )
-
def
get(): SparkRepository[DataType]
Get the built spark repository. Call build() before this method.
- returns
the constructed SparkRepository[DataType]
- Definition Classes
- SparkRepositoryBuilder → Builder
- def getAs[T](key: String)(implicit converter: Serializer[T]): Option[T]
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
getOrCreate(): SparkRepository[DataType]
- Definition Classes
- Builder
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
def
log: Logger
- Attributes
- protected
- Definition Classes
- Logging
-
def
logDebug(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logError(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logInfo(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logName: String
- Attributes
- protected
- Definition Classes
- Logging
-
def
logTrace(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
def
logWarning(msg: ⇒ String): Unit
- Attributes
- protected
- Definition Classes
- Logging
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
- def set[T](key: String, value: T)(implicit converter: Serializer[T]): SparkRepositoryBuilder.this.type
- def setAddColorColumns(boo: Boolean): SparkRepositoryBuilder.this.type
- def setClusteringKeys(cols: Option[Seq[String]]): SparkRepositoryBuilder.this.type
- def setConnector(connector: Connector): SparkRepositoryBuilder.this.type
- def setCustomConnectorClass(cls: String): SparkRepositoryBuilder.this.type
- def setDataAddress(address: String): SparkRepositoryBuilder.this.type
- def setDateFormat(fmt: String): SparkRepositoryBuilder.this.type
- def setDelimiter(delimiter: String): SparkRepositoryBuilder.this.type
- def setExcerptSize(size: Long): SparkRepositoryBuilder.this.type
- def setHeader(boo: Boolean): SparkRepositoryBuilder.this.type
- def setInferSchema(boo: Boolean): SparkRepositoryBuilder.this.type
- def setKeyspace(keyspace: String): SparkRepositoryBuilder.this.type
- def setMaxRowsInMemory(maxRowsInMemory: Long): SparkRepositoryBuilder.this.type
- def setPartitionKeys(cols: Option[Seq[String]]): SparkRepositoryBuilder.this.type
- def setPath(path: String): SparkRepositoryBuilder.this.type
- def setSaveMode(saveMode: SaveMode): SparkRepositoryBuilder.this.type
- def setSchema(schema: StructType): SparkRepositoryBuilder.this.type
- def setStorage(storage: Storage): SparkRepositoryBuilder.this.type
- def setTable(table: String): SparkRepositoryBuilder.this.type
- def setTimestampFormat(fmt: String): SparkRepositoryBuilder.this.type
- def setTreatEmptyValuesAsNulls(boo: Boolean): SparkRepositoryBuilder.this.type
- def setUseHeader(boo: Boolean): SparkRepositoryBuilder.this.type
- def setWorkbookPassword(pwd: String): SparkRepositoryBuilder.this.type
- var storage: Option[Storage]
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
-
def
toString(): String
- Definition Classes
- AnyRef → Any
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()