package
datanode
Type Members
-
-
-
case class
InsertTableQuery(table: String, columns: List[String]) extends TableQuery with Product with Serializable
-
case class
RedshiftDataNode extends DataNode with Product with Serializable
-
-
case class
S3File(id: PipelineObjectId, filePath: String, dataFormat: Option[DataFormat], preconditions: Seq[Precondition], onSuccessAlarms: Seq[SnsAlarm], onFailAlarms: Seq[SnsAlarm]) extends S3DataNode with Product with Serializable
-
case class
S3Folder(id: PipelineObjectId, directoryPath: String = "", dataFormat: Option[DataFormat] = None, preconditions: Seq[Precondition] = Seq(), onSuccessAlarms: Seq[SnsAlarm] = Seq(), onFailAlarms: Seq[SnsAlarm] = Seq()) extends S3DataNode with Product with Serializable
-
case class
SelectTableQuery(table: String, columns: List[String], whereClause: Option[String]) extends TableQuery with Product with Serializable
-
-
trait
TableQuery extends AnyRef
Value Members
-
-
-
object
S3File extends Serializable
-
object
S3Folder extends Serializable
-
object
SqlDataNode extends Serializable
Note: this behavior is not tested or documented anywhere in the AWS Data Pipeline docs; we assume the insert syntax is 'insert into mytable (col1, col2, ...)' without a 'values' clause.