Class ai.chronon.spark.TableUtils

case class TableUtils(sparkSession: SparkSession) extends Product with Serializable
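
A minimal usage sketch, assuming a Hive-enabled SparkSession and a hypothetical table name (neither is specified on this page); remaining arguments keep their listed defaults:

    import org.apache.spark.sql.SparkSession
    import ai.chronon.spark.TableUtils

    // Build or reuse a SparkSession; Hive support is assumed so catalog calls
    // such as tableExists and partitions resolve against a metastore.
    val spark: SparkSession = SparkSession
      .builder()
      .appName("table-utils-example")
      .enableHiveSupport()
      .getOrCreate()

    // TableUtils is a case class, so the companion apply constructs it directly.
    val tableUtils = TableUtils(spark)

    // "data.user_events" is a hypothetical table name used for illustration.
    val table = "data.user_events"
    if (tableUtils.tableExists(table)) {
      val allPartitions: Seq[String] = tableUtils.partitions(table)
      val latest: Option[String]     = tableUtils.lastAvailablePartition(table)
      println(s"$table has ${allPartitions.size} partitions; latest = $latest")
    }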

Linear Supertypes
Serializable, Serializable, Product, Equals, AnyRef, Any

Instance Constructors

  1. new TableUtils(sparkSession: SparkSession)


Value Members

  1. final def !=(arg0: Any): Boolean

    Definition Classes
    AnyRef → Any
  2. final def ##(): Int

    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean

    Definition Classes
    AnyRef → Any
  4. val aggregationParallelism: Int

  5. def allPartitions(tableName: String, partitionColumnsFilter: Seq[String] = Seq.empty): Seq[Map[String, String]]

  6. def alterTableProperties(tableName: String, properties: Map[String, String], unsetProperties: Seq[String] = Seq()): Unit

  7. def archiveOrDropTableIfExists(tableName: String, timestamp: Option[Instant]): Unit

  8. def archiveTableIfExists(tableName: String, timestamp: Option[Instant]): Unit

  9. final def asInstanceOf[T0]: T0

    Definition Classes
    Any
  10. val backfillValidationEnforced: Boolean

  11. val blockingCacheEviction: Boolean

  12. val bloomFilterThreshold: Long

  13. val cacheLevel: Option[StorageLevel]

  14. val cacheLevelString: String

  15. def checkTablePermission(tableName: String, fallbackPartition: String = ..., partitionColOpt: Option[String] = None): Boolean

  16. val chrononAvroSchemaValidation: Boolean

  17. def chunk(partitions: Set[String]): Seq[PartitionRange]

  18. def clone(): AnyRef

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  19. def columnSizeEstimator(dataType: DataType): Long

  20. def createDatabase(database: String): Boolean

  21. def dropPartitionRange(tableName: String, startDate: String, endDate: String, subPartitionFilters: Map[String, String] = Map.empty): Unit

  22. def dropPartitions(tableName: String, partitions: Seq[String], partitionColumn: String = partitionColumn, subPartitionFilters: Map[String, String] = Map.empty): Unit

  23. def dropTableIfExists(tableName: String): Unit

  24. final def eq(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  25. def finalize(): Unit

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  26. def firstAvailablePartition(tableName: String, subPartitionFilters: Map[String, String] = Map.empty, partitionColOpt: Option[String] = None): Option[String]

  27. final def getClass(): Class[_]

    Definition Classes
    AnyRef → Any
  28. def getColumnsFromQuery(query: String): Seq[String]

  29. def getFieldNames(schema: StructType): Seq[String]

  30. def getPartitionColumn(columnOpt: Option[String] = None): String

  31. def getPartitionColumn(q: Query): String

  32. def getSchemaFromTable(tableName: String): StructType

  33. def getTableProperties(tableName: String): Option[Map[String, String]]

  34. def insertPartitions(df: DataFrame, tableName: String, tableProperties: Map[String, String] = null, partitionColumns: Seq[String] = Seq(partitionColumn), saveMode: SaveMode = SaveMode.Overwrite, fileFormat: String = "PARQUET", autoExpand: Boolean = false, stats: Option[DfStats] = None, sortByCols: Seq[String] = Seq.empty): Unit

    (See the usage sketch following this member list.)
  35. def insertUnPartitioned(df: DataFrame, tableName: String, tableProperties: Map[String, String] = null, saveMode: SaveMode = SaveMode.Overwrite, fileFormat: String = "PARQUET"): Unit

  36. final def isInstanceOf[T0]: Boolean

    Definition Classes
    Any
  37. def isPartitioned(tableName: String, partitionColOpt: Option[String] = None): Boolean

  38. val joinPartParallelism: Int

  39. def lastAvailablePartition(tableName: String, subPartitionFilters: Map[String, String] = Map.empty, partitionColOpt: Option[String] = None): Option[String]

  40. def loadEntireTable(tableName: String): DataFrame

  41. lazy val logger: Logger

  42. val maxWait: Int

  43. val minWriteShuffleParallelism: Int

  44. final def ne(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  45. final def notify(): Unit

    Definition Classes
    AnyRef
  46. final def notifyAll(): Unit

    Definition Classes
    AnyRef
  47. val partitionColumn: String

  48. val partitionSpec: PartitionSpec

  49. def partitions(tableName: String, subPartitionsFilter: Map[String, String] = Map.empty, partitionColOpt: Option[String] = None): Seq[String]

  50. def preAggRepartition(rdd: RDD[Row]): RDD[Row]

  51. def preAggRepartition(df: DataFrame): DataFrame

  52. val smallModeNumRowsCutoff: Int

  53. val smallModelEnabled: Boolean

  54. val sparkSession: SparkSession

  55. def sql(query: String): DataFrame

  56. def sqlWithDefaultPartitionColumn(query: String, existingPartitionColumn: String): DataFrame

  57. final def synchronized[T0](arg0: ⇒ T0): T0

    Definition Classes
    AnyRef
  58. def tableExists(tableName: String): Boolean

  59. def tableReadFormat(tableName: String): Format

  60. def unfilledRanges(outputTable: String, outputPartitionRange: PartitionRange, inputTables: Option[Seq[String]] = None, inputTableToSubPartitionFiltersMap: Map[String, Map[String, String]] = Map.empty, inputTableToPartitionColumnsMap: Map[String, String] = Map.empty, inputToOutputShift: Int = 0, skipFirstHole: Boolean = true): Option[Seq[PartitionRange]]

  61. final def wait(): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  62. final def wait(arg0: Long, arg1: Int): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  63. final def wait(arg0: Long): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  64. def wrapWithCache[T](opString: String, dataFrame: DataFrame)(func: ⇒ T): Try[T]

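A sketch of running a query and writing its output as a partitioned table, reusing the tableUtils instance from the sketch near the top of this page. The query, table names, and table properties are illustrative assumptions; argument names follow the signatures of sql, wrapWithCache, and insertPartitions listed above, with the other parameters left at their defaults:

    import scala.util.Try
    import org.apache.spark.sql.{DataFrame, SaveMode}

    // Hypothetical source query; sql runs it through the wrapped SparkSession.
    val featureDf: DataFrame = tableUtils.sql(
      "SELECT user_id, ds FROM data.user_events WHERE ds = '2024-01-01'")

    // Run an expensive action with the DataFrame wrapped in a cache (per the
    // method name); the result comes back as a scala.util.Try.
    val rowCount: Try[Long] = tableUtils.wrapWithCache("count feature rows", featureDf) {
      featureDf.count()
    }

    // Write date-partitioned output into a hypothetical table. partitionColumns
    // defaults to Seq(partitionColumn) and fileFormat defaults to "PARQUET".
    tableUtils.insertPartitions(
      featureDf,
      "data.user_features",
      tableProperties = Map("owner" -> "example"),
      saveMode = SaveMode.Overwrite
    )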

Deprecated Value Members

  1. def dropPartitionsAfterHole(inputTable: String, outputTable: String, partitionRange: PartitionRange, subPartitionFilters: Map[String, String] = Map.empty): Option[String]

    Annotations
    @deprecated
    Deprecated
