Packages

package task

Type Members

  1. case class SparkDeDupTask[I <: Product : TypeTag](
       name: String,
       inputLocation: String,
       inputType: IOType,
       inputFilter: String = "1 = 1",
       transformation: Dataset[I] => Dataset[Row],
       checkpointLocation: String,
       eventTimeCol: String,
       delayThreshold: String,
       deDupCols: Seq[String]
     ) extends EtlTask[SparkEnv, Unit] with Product with Serializable
     (usage sketch below)
  2. case class SparkReadTask[I <: Product : TypeTag, O <: Product : TypeTag](
       name: String,
       inputLocation: List[String],
       inputType: IOType,
       inputFilter: String = "1 = 1",
       transformFunction: Option[(SparkSession, Dataset[I]) => Dataset[O]] = None
     ) extends EtlTask[SparkEnv, Dataset[O]] with Product with Serializable
     (usage sketch below)
     Annotations: @SuppressWarnings()
  3. case class SparkReadWriteTask[I <: Product : TypeTag, O <: Product : TypeTag](
       name: String,
       inputLocation: List[String],
       inputType: IOType,
       inputFilter: String = "1 = 1",
       outputLocation: String,
       outputType: IOType,
       outputSaveMode: SaveMode = SaveMode.Append,
       outputPartitionCol: Seq[String] = Seq.empty[String],
       outputFilename: Option[String] = None,
       outputCompression: String = "none",
       outputRepartitioning: Boolean = false,
       outputRepartitioningNum: Int = 1,
       transformFunction: Option[(SparkSession, Dataset[I]) => Dataset[O]] = None
     ) extends EtlTask[SparkEnv, Unit] with Product with Serializable
     (usage sketch below)
     Annotations: @SuppressWarnings()
  4. case class SparkTask[OP](
       name: String,
       transformFunction: SparkSession => OP
     ) extends EtlTask[SparkEnv, OP] with Product with Serializable
     (usage sketch below)
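
Usage Examples

The sketches below show how each task might be constructed. They are illustrative only: the record types, paths, and IOType members (e.g. IOType.PARQUET, IOType.JSON) are assumptions, and how a task is ultimately executed against a SparkEnv depends on the surrounding EtlTask machinery, which is not shown here.

SparkDeDupTask (item 1) takes a checkpoint location, an event-time column, and a delay threshold, which suggests a Structured Streaming watermark-plus-dropDuplicates pipeline over the given key columns. A minimal sketch, assuming a hypothetical Event record and an IOType.JSON member:

  import org.apache.spark.sql.{Dataset, Row}

  // Hypothetical event record; field names are assumptions for this sketch.
  case class Event(eventId: String, eventTime: java.sql.Timestamp, payload: String)

  val deDupTask = SparkDeDupTask[Event](
    name               = "dedup_events",
    inputLocation      = "s3a://my-bucket/events/",
    inputType          = IOType.JSON,                        // assumed IOType member
    transformation     = (ds: Dataset[Event]) => ds.toDF(),  // identity projection to Dataset[Row]
    checkpointLocation = "s3a://my-bucket/checkpoints/dedup_events/",
    eventTimeCol       = "eventTime",      // column the watermark is applied to
    delayThreshold     = "10 minutes",     // how late events may arrive
    deDupCols          = Seq("eventId")    // duplicate key
  )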
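SparkReadTask (item 2) reads a Dataset[I] from one or more locations, applies the inputFilter predicate (its default, "1 = 1", keeps every row), and optionally maps the result to a Dataset[O], which becomes the task's output. A minimal sketch, under the same assumptions:

  import org.apache.spark.sql.{Dataset, SparkSession}

  // Hypothetical record types for this sketch.
  case class RawOrder(orderId: Long, amount: Double, status: String)
  case class Order(orderId: Long, amount: Double)

  val readTask = SparkReadTask[RawOrder, Order](
    name          = "read_completed_orders",
    inputLocation = List("s3a://my-bucket/orders/"),
    inputType     = IOType.PARQUET,            // assumed IOType member
    inputFilter   = "status = 'COMPLETED'",    // SQL-style predicate
    transformFunction = Some { (spark: SparkSession, ds: Dataset[RawOrder]) =>
      import spark.implicits._
      ds.map(o => Order(o.orderId, o.amount))  // project raw rows to the output type
    }
  )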
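SparkReadWriteTask (item 3) combines a read, an optional transformation, and a write, with control over save mode, partition columns, compression, and repartitioning. A minimal sketch with hypothetical types and paths:

  import org.apache.spark.sql.SaveMode

  // Hypothetical record types for this sketch.
  case class RawSale(saleId: Long, region: String, amount: Double)
  case class Sale(saleId: Long, region: String, amount: Double)

  val readWriteTask = SparkReadWriteTask[RawSale, Sale](
    name                    = "sales_to_parquet",
    inputLocation           = List("s3a://my-bucket/raw/sales/"),
    inputType               = IOType.PARQUET,    // assumed IOType member
    outputLocation          = "s3a://my-bucket/curated/sales/",
    outputType              = IOType.PARQUET,    // assumed IOType member
    outputSaveMode          = SaveMode.Overwrite,
    outputPartitionCol      = Seq("region"),     // must be a column of the output type
    outputRepartitioning    = true,              // repartition before writing
    outputRepartitioningNum = 8,
    transformFunction       = Some { (spark, ds) =>
      import spark.implicits._
      ds.map(s => Sale(s.saleId, s.region, s.amount))
    }
  )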
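SparkTask (item 4) is the escape hatch: it runs an arbitrary function against the SparkSession and returns its result as the task's output. A minimal sketch that counts rows in a Parquet dataset at a hypothetical path:

  import org.apache.spark.sql.SparkSession

  val countTask = SparkTask[Long](
    name              = "count_sales",
    transformFunction = (spark: SparkSession) =>
      spark.read.parquet("s3a://my-bucket/curated/sales/").count()
  )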
