class SparkReadWriteStateStep[T <: Product, IPSTATE, O <: Product, OPSTATE] extends EtlStep[IPSTATE, OPSTATE]
Instance Constructors
- new SparkReadWriteStateStep(name: String, input_location: Seq[String], input_type: IOType, output_location: String, output_type: IOType, output_filename: Option[String] = None, output_partition_col: Option[String] = None, output_save_mode: SaveMode = SaveMode.Append, output_repartitioning: Boolean = false, transform_function: Option[(Input[T, IPSTATE]) ⇒ Output[O, OPSTATE]] = None)(spark: ⇒ SparkSession, etl_metadata: Map[String, String])(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[T], arg1: scala.reflect.api.JavaUniverse.TypeTag[O])
Value Members
- final def !=(arg0: Any): Boolean
- final def ##(): Int
- final def ==(arg0: Any): Boolean
- final def asInstanceOf[T0]: T0
- def clone(): AnyRef
- final def eq(arg0: AnyRef): Boolean
- def equals(arg0: Any): Boolean
- val etl_logger: Logger
- def finalize(): Unit
- final def getClass(): Class[_]
- def getExecutionMetrics: Map[String, Map[String, String]]
- def getStepProperties: Map[String, String]
- def hashCode(): Int
- final def isInstanceOf[T0]: Boolean
- val name: String
- final def ne(arg0: AnyRef): Boolean
- final def notify(): Unit
- final def notifyAll(): Unit
- def process(input_state: IPSTATE): Try[OPSTATE]
- def showCorruptedData(): Unit
- implicit lazy val sp: SparkSession
- final def synchronized[T0](arg0: ⇒ T0): T0
- def toString(): String
- final def wait(): Unit
- final def wait(arg0: Long, arg1: Int): Unit
- final def wait(arg0: Long): Unit
Inherited from EtlStep[IPSTATE, OPSTATE]
Inherited from AnyRef
Inherited from Any