
ai.chronon.spark

TableUtils

case class TableUtils(sparkSession: SparkSession) extends Product with Serializable

Linear Supertypes
Serializable, Product, Equals, AnyRef, Any

Instance Constructors

  1. new TableUtils(sparkSession: SparkSession)
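
A minimal orientation sketch for the constructor above: it wraps an existing SparkSession. The builder settings and application name below are illustrative assumptions, not part of this API.

  import org.apache.spark.sql.SparkSession
  import ai.chronon.spark.TableUtils

  // Hypothetical session setup; any already-configured SparkSession works.
  val spark: SparkSession = SparkSession
    .builder()
    .appName("table-utils-example")   // assumed application name
    .enableHiveSupport()              // assumed: Hive-backed catalog
    .getOrCreate()

  // Case-class constructor shown above: wraps the session for table operations.
  val tableUtils: TableUtils = TableUtils(spark)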

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##: Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. val aggregationParallelism: Int
  5. def allPartitions(tableName: String, partitionColumnsFilter: Seq[String] = Seq.empty): Seq[Map[String, String]]
  6. def alterTableProperties(tableName: String, properties: Map[String, String], unsetProperties: Seq[String] = Seq()): Unit
  7. def archiveOrDropTableIfExists(tableName: String, timestamp: Option[Instant]): Unit
  8. def archiveTableIfExists(tableName: String, timestamp: Option[Instant]): Unit
  9. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  10. val backfillValidationEnforced: Boolean
  11. val blockingCacheEviction: Boolean
  12. val bloomFilterThreshold: Long
  13. val cacheLevel: Option[StorageLevel]
  14. val cacheLevelString: String
  15. def checkTablePermission(tableName: String, fallbackPartition: String = partitionSpec.before(partitionSpec.at(System.currentTimeMillis())), partitionColOpt: Option[String] = None): Boolean
  16. val chrononAvroSchemaValidation: Boolean
  17. def chunk(partitions: Set[String]): Seq[PartitionRange]
  18. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.CloneNotSupportedException]) @native()
  19. def columnSizeEstimator(dataType: DataType): Long
  20. def createDatabase(database: String): Boolean
  21. def dropPartitionRange(tableName: String, startDate: String, endDate: String, subPartitionFilters: Map[String, String] = Map.empty): Unit
  22. def dropPartitions(tableName: String, partitions: Seq[String], partitionColumn: String = partitionColumn, subPartitionFilters: Map[String, String] = Map.empty): Unit
  23. def dropTableIfExists(tableName: String): Unit
  24. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  25. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.Throwable])
  26. def firstAvailablePartition(tableName: String, subPartitionFilters: Map[String, String] = Map.empty, partitionColOpt: Option[String] = None): Option[String]
  27. final def getClass(): Class[_ <: AnyRef]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  28. def getColumnsFromQuery(query: String): Seq[String]
  29. def getFieldNames(schema: StructType): Seq[String]
  30. def getPartitionColumn(columnOpt: Option[String] = None): String
  31. def getPartitionColumn(q: Query): String
  32. def getSchemaFromTable(tableName: String): StructType
  33. def getTableProperties(tableName: String): Option[Map[String, String]]
  34. def insertPartitions(df: DataFrame, tableName: String, tableProperties: Map[String, String] = null, partitionColumns: Seq[String] = Seq(partitionColumn), saveMode: SaveMode = SaveMode.Overwrite, fileFormat: String = "PARQUET", autoExpand: Boolean = false, stats: Option[DfStats] = None, sortByCols: Seq[String] = Seq.empty): Unit
  35. def insertUnPartitioned(df: DataFrame, tableName: String, tableProperties: Map[String, String] = null, saveMode: SaveMode = SaveMode.Overwrite, fileFormat: String = "PARQUET"): Unit
  36. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  37. def isPartitioned(tableName: String, partitionColOpt: Option[String] = None): Boolean
  38. val joinPartParallelism: Int
  39. def lastAvailablePartition(tableName: String, subPartitionFilters: Map[String, String] = Map.empty, partitionColOpt: Option[String] = None): Option[String]
  40. def loadEntireTable(tableName: String): DataFrame
  41. lazy val logger: Logger
    Annotations
    @transient()
  42. val maxWait: Int
  43. val minWriteShuffleParallelism: Int
  44. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  45. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  46. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  47. val partitionColumn: String
  48. val partitionSpec: PartitionSpec
  49. def partitions(tableName: String, subPartitionsFilter: Map[String, String] = Map.empty, partitionColOpt: Option[String] = None): Seq[String]
  50. def preAggRepartition(rdd: RDD[Row]): RDD[Row]
  51. def preAggRepartition(df: DataFrame): DataFrame
  52. def productElementNames: Iterator[String]
    Definition Classes
    Product
  53. val smallModeNumRowsCutoff: Int
  54. val smallModelEnabled: Boolean
  55. val sparkSession: SparkSession
  56. def sql(query: String): DataFrame
  57. def sqlWithDefaultPartitionColumn(query: String, existingPartitionColumn: String): DataFrame
  58. final def synchronized[T0](arg0: => T0): T0
    Definition Classes
    AnyRef
  59. def tableExists(tableName: String): Boolean
  60. def tableReadFormat(tableName: String): Format
  61. def unfilledRanges(outputTable: String, outputPartitionRange: PartitionRange, inputTables: Option[Seq[String]] = None, inputTableToSubPartitionFiltersMap: Map[String, Map[String, String]] = Map.empty, inputTableToPartitionColumnsMap: Map[String, String] = Map.empty, inputToOutputShift: Int = 0, skipFirstHole: Boolean = true): Option[Seq[PartitionRange]]
  62. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException])
  63. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException])
  64. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException]) @native()
  65. def wrapWithCache[T](opString: String, dataFrame: DataFrame)(func: => T): Try[T]
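
A minimal usage sketch tying together a few of the members listed above (tableExists, partitions, lastAvailablePartition, insertPartitions, and sql). The table names, source query, and session setup are assumptions for illustration only; defaults such as the partition column and sub-partition filters are left untouched.

  import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
  import ai.chronon.spark.TableUtils

  val spark: SparkSession = SparkSession.builder().getOrCreate() // assumed session
  val tu = TableUtils(spark)

  val table = "my_db.my_events" // hypothetical table name

  if (tu.tableExists(table)) {
    // All partition values for the default partition column.
    val parts: Seq[String] = tu.partitions(table)
    // Latest available partition, if any.
    val latest: Option[String] = tu.lastAvailablePartition(table)
    println(s"$table has ${parts.size} partitions; latest = $latest")
  }

  // Write a DataFrame into a partitioned table, overwriting existing partitions
  // (the defaults shown for insertPartitions above).
  val df: DataFrame = spark.sql("SELECT * FROM my_db.some_source") // assumed source
  tu.insertPartitions(df, table, saveMode = SaveMode.Overwrite)

  // Run arbitrary SQL through the wrapped session.
  val counts: DataFrame = tu.sql(s"SELECT COUNT(*) FROM $table")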

Deprecated Value Members

  1. def dropPartitionsAfterHole(inputTable: String, outputTable: String, partitionRange: PartitionRange, subPartitionFilters: Map[String, String] = Map.empty): Option[String]
    Annotations
    @deprecated
    Deprecated
