com.krux.hyperion

objects

package objects

Visibility
  1. Public
  2. All

Type Members

  1. trait DataFormat extends PipelineObject

  2. case class DefaultObject(schedule: Schedule)(implicit hc: HyperionContext) extends AdpDataPipelineAbstractObject with AdpDataPipelineDefaultObject with PipelineObject with Product with Serializable

  3. case class Ec2Resource(id: String, terminateAfter: String, role: Option[String], resourceRole: Option[String], instanceType: String, region: Option[String], imageId: Option[String], securityGroups: Seq[String], securityGroupIds: Seq[String], associatePublicIpAddress: Boolean)(implicit hc: HyperionContext) extends ResourceObject with Product with Serializable

    EC2 resource

  4. trait EmrActivity extends PipelineActivity

  5. trait EmrCluster extends ResourceObject

  6. trait GoogleStorageActivity extends PipelineActivity

  7. case class GoogleStorageDownloadActivity(id: String, runsOn: Ec2Resource, input: String = "", output: Option[S3DataNode] = None, botoConfigUrl: String = "", dependsOn: Seq[PipelineActivity] = Seq())(implicit hc: HyperionContext) extends GoogleStorageActivity with Product with Serializable

    Google Storage Download activity

  8. case class GoogleStorageUploadActivity(id: String, runsOn: Ec2Resource, input: Option[S3DataNode] = None, output: String = "", botoConfigUrl: String = "", dependsOn: Seq[PipelineActivity] = Seq())(implicit hc: HyperionContext) extends GoogleStorageActivity with Product with Serializable

    Google Storage Upload activity

  9. case class JarActivity(id: String, runsOn: Ec2Resource, jar: Option[String] = None, mainClass: Option[String] = None, arguments: Seq[String] = Seq(), dependsOn: Seq[PipelineActivity] = Seq(), input: Option[S3DataNode] = None, output: Option[S3DataNode] = None, stdout: Option[String] = None, stderr: Option[String] = None)(implicit hc: HyperionContext) extends PipelineActivity with Product with Serializable

    Activity that runs a JAR on an EC2 resource

  10. case class MapReduceActivity(id: String, runsOn: EmrCluster, steps: Seq[MapReduceStep] = Seq(), dependsOn: Seq[PipelineActivity] = Seq()) extends EmrActivity with Product with Serializable

    Defines a MapReduce activity

  11. case class MapReduceCluster(id: String = "MapReduceCluster", taskInstanceCount: Int = 0)(implicit hc: HyperionContext) extends EmrCluster with Product with Serializable

    Launch a MapReduce cluster

  12. case class MapReduceStep(jar: String = "", mainClass: String = "", args: Seq[String] = List()) extends Product with Serializable

    A MapReduce step that runs on a MapReduce cluster

  13. trait PipelineActivity extends PipelineObject

  14. trait PipelineObject extends AnyRef

    The base trait of Krux data pipeline objects.

  15. case class RedshiftCopyActivity(id: String, input: S3DataNode, insertMode: InsertMode, runsOn: Ec2Resource, output: RedshiftDataNode, transformSql: Option[String] = None, commandOptions: Seq[RedshiftCopyOption] = Seq(), dependsOn: Seq[PipelineActivity] = Seq()) extends PipelineActivity with Product with Serializable

    Redshift copy activity

  16. trait RedshiftCopyOption extends AnyRef

  17. case class RedshiftDataNode(id: String, database: RedshiftDatabase, tableName: String, createTableSql: Option[String] = None, schemaName: Option[String] = None, primaryKeys: Option[Seq[String]] = None) extends PipelineObject with Product with Serializable

    The abstracted RedshiftDataNode

  18. trait RedshiftDatabase extends PipelineObject

    Redshift database trait; to use this, extend it with an object.

  19. case class RedshiftUnloadActivity(id: String, database: RedshiftDatabase, script: String, s3Path: String, runsOn: Ec2Resource, unloadOptions: Seq[RedshiftUnloadOption] = Seq(), dependsOn: Seq[PipelineActivity] = Seq())(implicit hc: HyperionContext) extends PipelineActivity with Product with Serializable

    Redshift unload activity

  20. trait RedshiftUnloadOption extends AnyRef

  21. trait ResourceObject extends PipelineObject

  22. trait RunnableObject extends AnyRef

    Run time references of runnable objects

  23. trait S3DataNode extends PipelineObject

  24. case class S3File(id: String, filePath: String = "", dataFormat: Option[DataFormat] = None) extends S3DataNode with Product with Serializable

    Defines data from s3

  25. case class S3Folder(id: String, directoryPath: String = "", dataFormat: Option[DataFormat] = None) extends S3DataNode with Product with Serializable

    Defines data from s3 directory

  26. case class Schedule(id: String = "PipelineSchedule", start: Option[github.nscala_time.time.Imports.DateTime] = None, period: DpPeriod = 1.day, occurrences: Option[Int] = None, scheduleType: ScheduleType = Cron) extends PipelineObject with Product with Serializable

    Cron-like schedule that runs at a defined period.

    Cron-like schedule that runs at a defined period.

    Note

    If the given start time is in the past, Data Pipeline will perform a backfill from that start time.

  27. case class ShellCommandActivity(id: String, runsOn: Ec2Resource, command: Option[String] = None, scriptUri: Option[String] = None, scriptArguments: Seq[String] = Seq(), stage: Boolean = true, input: Option[S3DataNode] = None, output: Option[S3DataNode] = None, dependsOn: Seq[PipelineActivity] = Seq(), stdout: Option[String] = None, stderr: Option[String] = None) extends PipelineActivity with Product with Serializable

    Shell command activity

  28. case class SparkActivity(id: String, runsOn: SparkCluster, steps: Seq[SparkStep] = Seq(), dependsOn: Seq[PipelineActivity] = Seq()) extends EmrActivity with Product with Serializable

    Defines a spark activity

  29. case class SparkCluster(id: String, taskInstanceCount: Int, coreInstanceCount: Int, instanceType: String, amiVersion: String, sparkVersion: String, terminateAfter: String)(implicit hc: HyperionContext) extends EmrCluster with Product with Serializable

    Launch a Spark cluster

  30. case class SparkStep(jar: String = "", mainClass: String = "", args: Seq[String] = List())(implicit hc: HyperionContext) extends Product with Serializable

    A Spark step that runs on a Spark cluster

  31. case class TsvDataFormat(id: String, column: Option[Seq[String]] = None) extends DataFormat with Product with Serializable

    TSV data format

Value Members

  1. object Ec2Resource extends Serializable

  2. object MapReduceActivity extends RunnableObject with Serializable

  3. object RedshiftCopyActivity extends Enumeration with RunnableObject

  4. object RedshiftCopyOption

  5. object RedshiftUnloadOption

  6. object S3DataNode

  7. object Schedule extends Serializable

  8. object ScheduleType extends Enumeration

  9. object SparkActivity extends RunnableObject with Serializable

  10. object SparkCluster extends Serializable

  11. package aws

Ungrouped