Class ai.chronon.spark.TableUtils

Related Doc: package spark

Permalink

case class TableUtils(sparkSession: SparkSession) extends Product with Serializable

Linear Supertypes
Serializable, Serializable, Product, Equals, AnyRef, Any
Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. TableUtils
  2. Serializable
  3. Serializable
  4. Product
  5. Equals
  6. AnyRef
  7. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. All

Instance Constructors

  1. new TableUtils(sparkSession: SparkSession)

    Permalink

Value Members

  1. final def !=(arg0: Any): Boolean

    Permalink
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int

    Permalink
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean

    Permalink
    Definition Classes
    AnyRef → Any
  4. val aggregationParallelism: Int

    Permalink
  5. def allPartitions(tableName: String, partitionColumnsFilter: Seq[String] = Seq.empty): Seq[Map[String, String]]

    Permalink
  6. def alterTableProperties(tableName: String, properties: Map[String, String], unsetProperties: Seq[String] = Seq()): Unit

    Permalink
  7. def archiveOrDropTableIfExists(tableName: String, timestamp: Option[Instant]): Unit

    Permalink
  8. def archiveTableIfExists(tableName: String, timestamp: Option[Instant]): Unit

    Permalink
  9. final def asInstanceOf[T0]: T0

    Permalink
    Definition Classes
    Any
  10. val backfillValidationEnforced: Boolean

    Permalink
  11. val blockingCacheEviction: Boolean

    Permalink
  12. val bloomFilterThreshold: Long

    Permalink
  13. val cacheLevel: Option[StorageLevel]

    Permalink
  14. val cacheLevelString: String

    Permalink
  15. def checkTablePermission(tableName: String, fallbackPartition: String = ..., partitionColOpt: Option[String] = None): Boolean

    Permalink
  16. def chunk(partitions: Set[String]): Seq[PartitionRange]

    Permalink
  17. def clone(): AnyRef

    Permalink
    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.CloneNotSupportedException] )
  18. def columnSizeEstimator(dataType: DataType): Long

    Permalink
  19. def createDatabase(database: String): Boolean

    Permalink
  20. def dropPartitionRange(tableName: String, startDate: String, endDate: String, subPartitionFilters: Map[String, String] = Map.empty): Unit

    Permalink
  21. def dropPartitions(tableName: String, partitions: Seq[String], partitionColumn: String = partitionColumn, subPartitionFilters: Map[String, String] = Map.empty): Unit

    Permalink
  22. def dropTableIfExists(tableName: String): Unit

    Permalink
  23. final def eq(arg0: AnyRef): Boolean

    Permalink
    Definition Classes
    AnyRef
  24. def finalize(): Unit

    Permalink
    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  25. def firstAvailablePartition(tableName: String, subPartitionFilters: Map[String, String] = Map.empty, partitionColOpt: Option[String] = None): Option[String]

    Permalink
  26. final def getClass(): Class[_]

    Permalink
    Definition Classes
    AnyRef → Any
  27. def getColumnsFromQuery(query: String): Seq[String]

    Permalink
  28. def getFieldNames(schema: StructType): Seq[String]

    Permalink
  29. def getPartitionColumn(columnOpt: Option[String] = None): String

    Permalink
  30. def getPartitionColumn(q: Query): String

    Permalink
  31. def getSchemaFromTable(tableName: String): StructType

    Permalink
  32. def getTableProperties(tableName: String): Option[Map[String, String]]

    Permalink
  33. def insertPartitions(df: DataFrame, tableName: String, tableProperties: Map[String, String] = null, partitionColumns: Seq[String] = Seq(partitionColumn), saveMode: SaveMode = SaveMode.Overwrite, fileFormat: String = "PARQUET", autoExpand: Boolean = false, stats: Option[DfStats] = None, sortByCols: Seq[String] = Seq.empty): Unit

    Permalink
  34. def insertUnPartitioned(df: DataFrame, tableName: String, tableProperties: Map[String, String] = null, saveMode: SaveMode = SaveMode.Overwrite, fileFormat: String = "PARQUET"): Unit

    Permalink
  35. final def isInstanceOf[T0]: Boolean

    Permalink
    Definition Classes
    Any
  36. def isPartitioned(tableName: String, partitionColOpt: Option[String] = None): Boolean

    Permalink
  37. val joinPartParallelism: Int

    Permalink
  38. def lastAvailablePartition(tableName: String, subPartitionFilters: Map[String, String] = Map.empty, partitionColOpt: Option[String] = None): Option[String]

    Permalink
  39. def loadEntireTable(tableName: String): DataFrame

    Permalink
  40. lazy val logger: Logger

    Permalink
  41. val maxWait: Int

    Permalink
  42. val minWriteShuffleParallelism: Int

    Permalink
  43. final def ne(arg0: AnyRef): Boolean

    Permalink
    Definition Classes
    AnyRef
  44. final def notify(): Unit

    Permalink
    Definition Classes
    AnyRef
  45. final def notifyAll(): Unit

    Permalink
    Definition Classes
    AnyRef
  46. val partitionColumn: String

    Permalink
  47. val partitionSpec: PartitionSpec

    Permalink
  48. def partitions(tableName: String, subPartitionsFilter: Map[String, String] = Map.empty, partitionColOpt: Option[String] = None): Seq[String]

    Permalink
  49. def preAggRepartition(rdd: RDD[Row]): RDD[Row]

    Permalink
  50. def preAggRepartition(df: DataFrame): DataFrame

    Permalink
  51. val smallModeNumRowsCutoff: Int

    Permalink
  52. val smallModelEnabled: Boolean

    Permalink
  53. val sparkSession: SparkSession

    Permalink
  54. def sql(query: String): DataFrame

    Permalink
  55. def sqlWithDefaultPartitionColumn(query: String, existingPartitionColumn: String): DataFrame

    Permalink
  56. final def synchronized[T0](arg0: ⇒ T0): T0

    Permalink
    Definition Classes
    AnyRef
  57. def tableExists(tableName: String): Boolean

    Permalink
  58. def tableReadFormat(tableName: String): Format

    Permalink
  59. def unfilledRanges(outputTable: String, outputPartitionRange: PartitionRange, inputTables: Option[Seq[String]] = None, inputTableToSubPartitionFiltersMap: Map[String, Map[String, String]] = Map.empty, inputTableToPartitionColumnsMap: Map[String, String] = Map.empty, inputToOutputShift: Int = 0, skipFirstHole: Boolean = true): Option[Seq[PartitionRange]]

    Permalink
  60. final def wait(): Unit

    Permalink
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.InterruptedException] )
  61. final def wait(arg0: Long, arg1: Int): Unit

    Permalink
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.InterruptedException] )
  62. final def wait(arg0: Long): Unit

    Permalink
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.InterruptedException] )
  63. def wrapWithCache[T](opString: String, dataFrame: DataFrame)(func: ⇒ T): Try[T]

    Permalink

Deprecated Value Members

  1. def dropPartitionsAfterHole(inputTable: String, outputTable: String, partitionRange: PartitionRange, subPartitionFilters: Map[String, String] = Map.empty): Option[String]

    Permalink
    Annotations
    @deprecated
    Deprecated

Inherited from Serializable

Inherited from Serializable

Inherited from Product

Inherited from Equals

Inherited from AnyRef

Inherited from Any

Ungrouped