class

ai.chronon.spark

TableUtils

case class TableUtils(sparkSession: SparkSession) extends Product with Serializable

Linear Supertypes
Serializable, Product, Equals, AnyRef, Any
Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. TableUtils
  2. Serializable
  3. Product
  4. Equals
  5. AnyRef
  6. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. Protected

Instance Constructors

  1. new TableUtils(sparkSession: SparkSession)

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##: Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. val aggregationParallelism: Int
  5. def allPartitions(tableName: String, partitionColumnsFilter: Seq[String] = Seq.empty): Seq[Map[String, String]]
  6. def alterTableProperties(tableName: String, properties: Map[String, String]): Unit
  7. def archiveOrDropTableIfExists(tableName: String, timestamp: Option[Instant]): Unit
  8. def archiveTableIfExists(tableName: String, timestamp: Option[Instant]): Unit
  9. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  10. val backfillValidationEnforced: Boolean
  11. val blockingCacheEviction: Boolean
  12. val bloomFilterThreshold: Long
  13. val cacheLevel: Option[StorageLevel]
  14. val cacheLevelString: String
  15. def checkTablePermission(tableName: String, fallbackPartition: String = partitionSpec.before(partitionSpec.at(System.currentTimeMillis()))): Boolean
  16. def chunk(partitions: Set[String]): Seq[PartitionRange]
  17. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.CloneNotSupportedException]) @native()
  18. def columnSizeEstimator(dataType: DataType): Long
  19. def createDatabase(database: String): Boolean
  20. def dropPartitionRange(tableName: String, startDate: String, endDate: String, subPartitionFilters: Map[String, String] = Map.empty): Unit
  21. def dropPartitions(tableName: String, partitions: Seq[String], partitionColumn: String = partitionColumn, subPartitionFilters: Map[String, String] = Map.empty): Unit
  22. def dropTableIfExists(tableName: String): Unit
  23. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  24. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.Throwable])
  25. def firstAvailablePartition(tableName: String, subPartitionFilters: Map[String, String] = Map.empty): Option[String]
  26. final def getClass(): Class[_ <: AnyRef]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  27. def getColumnsFromQuery(query: String): Seq[String]
  28. def getFieldNames(schema: StructType): Seq[String]
  29. def getSchemaFromTable(tableName: String): StructType
  30. def getTableProperties(tableName: String): Option[Map[String, String]]
  31. def insertPartitions(df: DataFrame, tableName: String, tableProperties: Map[String, String] = null, partitionColumns: Seq[String] = Seq(partitionColumn), saveMode: SaveMode = SaveMode.Overwrite, fileFormat: String = "PARQUET", autoExpand: Boolean = false, stats: Option[DfStats] = None, sortByCols: Seq[String] = Seq.empty): Unit
  32. def insertUnPartitioned(df: DataFrame, tableName: String, tableProperties: Map[String, String] = null, saveMode: SaveMode = SaveMode.Overwrite, fileFormat: String = "PARQUET"): Unit
  33. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  34. def isPartitioned(tableName: String): Boolean
  35. val joinPartParallelism: Int
  36. def lastAvailablePartition(tableName: String, subPartitionFilters: Map[String, String] = Map.empty): Option[String]
  37. def loadEntireTable(tableName: String): DataFrame
  38. lazy val logger: Logger
    Annotations
    @transient()
  39. val maxWait: Int
  40. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  41. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  42. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  43. def parsePartition(pstring: String): Map[String, String]
  44. val partitionColumn: String
  45. val partitionSpec: PartitionSpec
  46. def partitions(tableName: String, subPartitionsFilter: Map[String, String] = Map.empty): Seq[String]
  47. def preAggRepartition(rdd: RDD[Row]): RDD[Row]
  48. def preAggRepartition(df: DataFrame): DataFrame
  49. def productElementNames: Iterator[String]
    Definition Classes
    Product
  50. val smallModeNumRowsCutoff: Int
  51. val smallModelEnabled: Boolean
  52. val sparkSession: SparkSession
  53. def sql(query: String): DataFrame
  54. final def synchronized[T0](arg0: => T0): T0
    Definition Classes
    AnyRef
  55. def tableExists(tableName: String): Boolean
  56. def unfilledRanges(outputTable: String, outputPartitionRange: PartitionRange, inputTables: Option[Seq[String]] = None, inputTableToSubPartitionFiltersMap: Map[String, Map[String, String]] = Map.empty, inputToOutputShift: Int = 0, skipFirstHole: Boolean = true): Option[Seq[PartitionRange]]
  57. val useIceberg: Boolean
  58. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException])
  59. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException])
  60. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException]) @native()
  61. def wrapWithCache[T](opString: String, dataFrame: DataFrame)(func: => T): Try[T]

Deprecated Value Members

  1. def dropPartitionsAfterHole(inputTable: String, outputTable: String, partitionRange: PartitionRange, subPartitionFilters: Map[String, String] = Map.empty): Option[String]
    Annotations
    @deprecated
    Deprecated

Inherited from Serializable

Inherited from Product

Inherited from Equals

Inherited from AnyRef

Inherited from Any

Ungrouped