object ConfigUtils
Type Members

- case class ConfigError(path: String, lineNumber: Option[Int], message: String) extends Error with Product with Serializable
- case class Edge(source: Vertex, target: Vertex) extends Product with Serializable
- sealed trait Error extends AnyRef
- case class Graph(vertices: List[Vertex], edges: List[Edge], containsPipelineStagePlugin: Boolean) extends Product with Serializable
- type IntList = List[Int]
- case class StageError(idx: Int, stage: String, lineNumber: Int, errors: Errors) extends Error with Product with Serializable
- type StringConfigValue = Either[Errors, String]
- type StringList = List[String]
- case class Vertex(stageId: Int, name: String) extends Product with Serializable
Value Members

- final def !=(arg0: Any): Boolean
- final def ##(): Int
- final def ==(arg0: Any): Boolean
- object ConfigError extends Serializable
- final def asInstanceOf[T0]: T0
- def checkValidKeys(c: Config)(expectedKeys: ⇒ Seq[String]): Either[Errors, String]
- def checkValidKeysReflection[T](c: Config)(implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[T]): Seq[String]
- def classAccessors[T](implicit arg0: scala.reflect.api.JavaUniverse.TypeTag[T]): List[String]
- def clone(): AnyRef
- final def eq(arg0: AnyRef): Boolean
- def equals(arg0: Any): Boolean
- def finalize(): Unit
- final def getClass(): Class[_]
- def getConfigString(uri: URI, argsMap: Map[String, String], arcContext: ARCContext)(implicit spark: SparkSession, logger: Logger): Either[List[Error], String]
- def hashCode(): Int
- final def isInstanceOf[T0]: Boolean
- def levenshteinDistance(keys: Seq[String], input: String)(limit: Int): Seq[String]
- final def ne(arg0: AnyRef): Boolean
- final def notify(): Unit
- final def notifyAll(): Unit
- def paramsToOptions(params: Map[String, String], options: Seq[String]): Map[String, String]
- def parseConfig(uri: Either[String, URI], argsMap: Map[String, String], graph: Graph, arcContext: ARCContext)(implicit spark: SparkSession, logger: Logger): Either[List[Error], (ETLPipeline, Graph, ARCContext)]
- def parseDataType(path: String)(datatype: String)(implicit c: Config): Either[Errors, DataType]
- def parseDelimiter(path: String)(delim: String)(implicit c: Config): Either[Errors, Delimiter]
- def parseEncoding(path: String)(encoding: String)(implicit c: Config): Either[Errors, EncodingType]
- def parseFailMode(path: String)(delim: String)(implicit c: Config): Either[Errors, FailModeType]
- def parseIsolationLevel(path: String)(quote: String)(implicit c: Config): Either[Errors, IsolationLevelType]
- def parseOutputModeType(path: String)(delim: String)(implicit c: Config): Either[Errors, OutputModeType]
- def parsePipeline(configUri: Option[String], argsMap: Map[String, String], graph: Graph, arcContext: ARCContext)(implicit spark: SparkSession, logger: Logger): Either[List[Error], (ETLPipeline, Graph, ARCContext)]
- def parseQuote(path: String)(quote: String)(implicit c: Config): Either[Errors, QuoteCharacter]
- def parseResponseType(path: String)(delim: String)(implicit c: Config): Either[Errors, ResponseType]
- def parseSaveMode(path: String)(delim: String)(implicit c: Config): Either[Errors, SaveMode]
- def readAuthentication(path: String)(implicit c: Config): Either[Errors, Option[Authentication]]
- def readAvroExtract(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readAvroLoad(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readAzureCosmosDBExtract(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readAzureEventHubsLoad(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readBytesExtract(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readConsoleLoad(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readCustomStage(idx: Int, graph: Graph, stageType: String, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readDatabricksDeltaExtract(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readDatabricksDeltaLoad(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readDatabricksSQLDWLoad(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readDelimitedExtract(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config, ctx: ARCContext): (Either[List[StageError], PipelineStage], Graph)
- def readDelimitedLoad(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readDiffTransform(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config, ctx: ARCContext): (Either[List[StageError], PipelineStage], Graph)
- def readElasticsearchExtract(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readElasticsearchLoad(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readEqualityValidate(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readHTTPExecute(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readHTTPExtract(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readHTTPLoad(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readHTTPTransform(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readImageExtract(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readJDBCExecute(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readJDBCExtract(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readJDBCLoad(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readJSONExtract(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readJSONLoad(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readJSONTransform(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readKafkaCommitExecute(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readKafkaExtract(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readKafkaLoad(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readMLTransform(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readMap(path: String, c: Config): Map[String, String]
- def readMetadataFilterTransform(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readORCExtract(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readORCLoad(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readParquetExtract(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readParquetLoad(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config, ctx: ARCContext): (Either[List[StageError], PipelineStage], Graph)
- def readPipeline(c: Config, configMD5: String, uri: String, argsMap: Map[String, String], graph: Graph, arcContext: ARCContext)(implicit spark: SparkSession, logger: Logger): Either[List[Error], (ETLPipeline, Graph, ARCContext)]
- def readPipelineExecute(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String], argsMap: Map[String, String], arcContext: ARCContext)(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readRateExtract(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readSQLTransform(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config, ctx: ARCContext): (Either[List[StageError], PipelineStage], Graph)
- def readSQLValidate(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readTensorFlowServingTransform(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readTextExtract(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readTextLoad(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config, ctx: ARCContext): (Either[List[StageError], PipelineStage], Graph)
- def readTypingTransform(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readXMLExtract(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def readXMLLoad(idx: Int, graph: Graph, name: StringConfigValue, params: Map[String, String])(implicit spark: SparkSession, logger: Logger, c: Config): (Either[List[StageError], PipelineStage], Graph)
- def resolveConfigPlugins(c: Config, base: Config, arcContext: ARCContext)(implicit logger: Logger): List[Map[String, AnyRef]]
- def resolveLifecyclePlugins(c: Config, arcContext: ARCContext)(implicit logger: Logger): List[LifecyclePlugin]
- def stringOrDefault(sv: StringConfigValue, default: String): String
- final def synchronized[T0](arg0: ⇒ T0): T0
- def toString(): String
- def validateAzureSharedKey(path: String)(authentication: Option[Authentication])(implicit c: Config): Either[Errors, Option[Authentication]]
- def validateURI(path: String)(uri: String)(implicit c: Config): Either[Errors, URI]
- final def wait(): Unit
- final def wait(arg0: Long, arg1: Int): Unit
- final def wait(arg0: Long): Unit

Inherited from AnyRef
Inherited from Any