class

ai.chronon.spark

TableUtils

case class TableUtils(sparkSession: SparkSession) extends Product with Serializable

Linear Supertypes
Serializable, Product, Equals, AnyRef, Any
Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. TableUtils
  2. Serializable
  3. Product
  4. Equals
  5. AnyRef
  6. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. Protected

Instance Constructors

  1. new TableUtils(sparkSession: SparkSession)

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##: Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. val aggregationParallelism: Int
  5. def allPartitions(tableName: String, partitionColumnsFilter: Seq[String] = Seq.empty): Seq[Map[String, String]]
  6. def alterTableProperties(tableName: String, properties: Map[String, String], unsetProperties: Seq[String] = Seq()): Unit
  7. def archiveOrDropTableIfExists(tableName: String, timestamp: Option[Instant]): Unit
  8. def archiveTableIfExists(tableName: String, timestamp: Option[Instant]): Unit
  9. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  10. val backfillValidationEnforced: Boolean
  11. val blockingCacheEviction: Boolean
  12. val bloomFilterThreshold: Long
  13. val cacheLevel: Option[StorageLevel]
  14. val cacheLevelString: String
  15. def checkTablePermission(tableName: String, fallbackPartition: String = partitionSpec.before(partitionSpec.at(System.currentTimeMillis())), partitionColOpt: Option[String] = None): Boolean
  16. def chunk(partitions: Set[String]): Seq[PartitionRange]
  17. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.CloneNotSupportedException]) @native()
  18. def columnSizeEstimator(dataType: DataType): Long
  19. def createDatabase(database: String): Boolean
  20. def dropPartitionRange(tableName: String, startDate: String, endDate: String, subPartitionFilters: Map[String, String] = Map.empty): Unit
  21. def dropPartitions(tableName: String, partitions: Seq[String], partitionColumn: String = partitionColumn, subPartitionFilters: Map[String, String] = Map.empty): Unit
  22. def dropTableIfExists(tableName: String): Unit
  23. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  24. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.Throwable])
  25. def firstAvailablePartition(tableName: String, subPartitionFilters: Map[String, String] = Map.empty, partitionColOpt: Option[String] = None): Option[String]
  26. final def getClass(): Class[_ <: AnyRef]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  27. def getColumnsFromQuery(query: String): Seq[String]
  28. def getFieldNames(schema: StructType): Seq[String]
  29. def getPartitionColumn(columnOpt: Option[String] = None): String
  30. def getPartitionColumn(q: Query): String
  31. def getSchemaFromTable(tableName: String): StructType
  32. def getTableProperties(tableName: String): Option[Map[String, String]]
  33. def insertPartitions(df: DataFrame, tableName: String, tableProperties: Map[String, String] = null, partitionColumns: Seq[String] = Seq(partitionColumn), saveMode: SaveMode = SaveMode.Overwrite, fileFormat: String = "PARQUET", autoExpand: Boolean = false, stats: Option[DfStats] = None, sortByCols: Seq[String] = Seq.empty): Unit
  34. def insertUnPartitioned(df: DataFrame, tableName: String, tableProperties: Map[String, String] = null, saveMode: SaveMode = SaveMode.Overwrite, fileFormat: String = "PARQUET"): Unit
  35. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  36. def isPartitioned(tableName: String, partitionColOpt: Option[String] = None): Boolean
  37. val joinPartParallelism: Int
  38. def lastAvailablePartition(tableName: String, subPartitionFilters: Map[String, String] = Map.empty, partitionColOpt: Option[String] = None): Option[String]
  39. def loadEntireTable(tableName: String): DataFrame
  40. lazy val logger: Logger
    Annotations
    @transient()
  41. val maxWait: Int
  42. val minWriteShuffleParallelism: Int
  43. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  44. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  45. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  46. val partitionColumn: String
  47. val partitionSpec: PartitionSpec
  48. def partitions(tableName: String, subPartitionsFilter: Map[String, String] = Map.empty, partitionColOpt: Option[String] = None): Seq[String]
  49. def preAggRepartition(rdd: RDD[Row]): RDD[Row]
  50. def preAggRepartition(df: DataFrame): DataFrame
  51. def productElementNames: Iterator[String]
    Definition Classes
    Product
  52. val smallModeNumRowsCutoff: Int
  53. val smallModelEnabled: Boolean
  54. val sparkSession: SparkSession
  55. def sql(query: String): DataFrame
  56. def sqlWithDefaultPartitionColumn(query: String, existingPartitionColumn: String): DataFrame
  57. final def synchronized[T0](arg0: => T0): T0
    Definition Classes
    AnyRef
  58. def tableExists(tableName: String): Boolean
  59. def tableReadFormat(tableName: String): Format
  60. def unfilledRanges(outputTable: String, outputPartitionRange: PartitionRange, inputTables: Option[Seq[String]] = None, inputTableToSubPartitionFiltersMap: Map[String, Map[String, String]] = Map.empty, inputTableToPartitionColumnsMap: Map[String, String] = Map.empty, inputToOutputShift: Int = 0, skipFirstHole: Boolean = true): Option[Seq[PartitionRange]]
  61. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException])
  62. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException])
  63. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException]) @native()
  64. def wrapWithCache[T](opString: String, dataFrame: DataFrame)(func: => T): Try[T]

Deprecated Value Members

  1. def dropPartitionsAfterHole(inputTable: String, outputTable: String, partitionRange: PartitionRange, subPartitionFilters: Map[String, String] = Map.empty): Option[String]
    Annotations
    @deprecated
    Deprecated

Inherited from Serializable

Inherited from Product

Inherited from Equals

Inherited from AnyRef

Inherited from Any

Ungrouped