org.apache.spark.sql.collection

Utils

object Utils

Linear Supertypes
AnyRef, Any

Type Members

  1. implicit final class StringExtensions extends AnyVal

Value Members

  1. final def !=(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  2. final def !=(arg0: Any): Boolean

    Definition Classes
    Any
  3. final def ##(): Int

    Definition Classes
    AnyRef → Any
  4. final def ==(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  5. final def ==(arg0: Any): Boolean

    Definition Classes
    Any
  6. def ERROR_NO_QCS(module: String): String

  7. final val WEIGHTAGE_COLUMN_NAME: String("STRATIFIED_SAMPLER_WEIGHTAGE")

  8. final val Z95Percent: Double

  9. final val Z95Squared: Double

  10. def analysisException(msg: String): AnalysisException

  11. final def asInstanceOf[T0]: T0

    Definition Classes
    Any
  12. def classForName(className: String): Class[_]

  13. def clone(): AnyRef

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  14. def columnIndex(col: String, cols: Array[String], module: String): Int

  15. def dataTypeStringBuilder(dataType: DataType, result: StringBuilder): (Any) ⇒ Unit

  16. final def eq(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  17. def equals(arg0: Any): Boolean

    Definition Classes
    AnyRef → Any
  18. def fieldName(f: StructField): String

  19. def fillArray[T](a: Array[_ >: T], v: T, start: Int, endP1: Int): Unit

  20. def finalize(): Unit

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  21. def getAllExecutorsMemoryStatus(sc: SparkContext): Map[BlockManagerId, (Long, Long)]

  22. final def getClass(): Class[_]

    Definition Classes
    AnyRef → Any
  23. def getClientHostPort(netServer: String): String

  24. def getDriverClassName(url: String): String

  25. def getFields(o: Any): Map[String, Any]

  26. def getFields(cols: Array[String], schema: StructType, module: String): Array[StructField]

  27. def getFixedPartitionRDD[T](sc: SparkContext, f: (TaskContext, Partition) ⇒ Iterator[T], partitioner: Partitioner, numPartitions: Int)(implicit arg0: ClassTag[T]): RDD[T]

  28. def getHostExecutorId(blockId: BlockManagerId): String

  29. def getInternalType(dataType: DataType): Class[_]

  30. def getSchemaAndPlanFromBase(schemaOpt: Option[StructType], baseTableOpt: Option[String], catalog: SnappyStoreHiveCatalog, asSelect: Boolean, table: String, tableType: String): (StructType, Option[LogicalPlan])

    Get the result schema given an optional explicit schema and an optional base table. If both are specified, check that the two are compatible. (A hedged sketch of such a compatibility check appears under Examples below.)

  31. def hasLowerCase(k: String): Boolean

  32. def hashCode(): Int

    Definition Classes
    AnyRef → Any
  33. final def isInstanceOf[T0]: Boolean

    Definition Classes
    Any
  34. final def isLoner(sc: SparkContext): Boolean

  35. def mapExecutors[T](sc: SparkContext, f: (TaskContext, ExecutorLocalPartition) ⇒ Iterator[T])(implicit arg0: ClassTag[T]): RDD[T]

  36. def mapExecutors[T](sqlContext: SQLContext, f: () ⇒ Iterator[T])(implicit arg0: ClassTag[T]): RDD[T]

  37. def matchOption(optName: String, options: Map[String, Any]): Option[(String, Any)]

  38. final def ne(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  39. final def notify(): Unit

    Definition Classes
    AnyRef
  40. final def notifyAll(): Unit

    Definition Classes
    AnyRef
  41. def parseColumn(cv: Any, cols: Array[String], module: String, option: String): Int

  42. def parseDouble(v: Any, module: String, option: String, min: Double, max: Double, exclusive: Boolean = true): Double

  43. def parseInteger(v: Any, module: String, option: String, min: Int = 1, max: Int = Int.MaxValue): Int

  44. def parseTimeInterval(optV: Any, module: String): Long

    Parse the given time interval value as long milliseconds. (An illustrative parsing sketch appears under Examples below.)

    See also

    timeIntervalSpec for the allowed string specification

  45. def parseTimestamp(ts: String, module: String, col: String): Long

  46. def projectColumns(row: Row, columnIndices: Array[Int], schema: StructType, convertToScalaRow: Boolean): GenericRow

  47. def qcsOf(qa: Array[String], cols: Array[String], module: String): (Array[Int], Array[String])

  48. def registerDriver(driver: String): Unit

    Register the given driver class with Spark's loader.

  49. def registerDriverUrl(url: String): String

    Register the driver for the given JDBC URL and return the driver class name. (An illustrative sketch appears under Examples below.)

  50. def resolveQCS(options: Map[String, Any], fieldNames: Array[String], module: String): (Array[Int], Array[String])

  51. def resolveQCS(qcsV: Option[Any], fieldNames: Array[String], module: String): (Array[Int], Array[String])

  52. def schemaFields(schema: StructType): Map[String, StructField]

  53. final def synchronized[T0](arg0: ⇒ T0): T0

    Definition Classes
    AnyRef
  54. final val timeIntervalSpec: Regex

    String specification for time intervals accepted by parseTimeInterval.

  55. def toLowerCase(k: String): String

  56. def toString(): String

    Definition Classes
    AnyRef → Any
  57. def toUpperCase(k: String): String

  58. final def wait(): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  59. final def wait(arg0: Long, arg1: Int): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  60. final def wait(arg0: Long): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  61. def withNewExecutionId[T](ctx: SQLContext, queryExecution: QueryExecution)(body: ⇒ T): T
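
Examples

The following sketch illustrates the compatibility check that getSchemaAndPlanFromBase is documented to perform when both an explicit schema and a base table are given. It is a minimal, hypothetical rendering: the real method also resolves a LogicalPlan through SnappyStoreHiveCatalog, and its actual compatibility rule may differ. The name checkSchemaCompatible is illustrative, not part of this API.

  import org.apache.spark.sql.types._

  object SchemaCompatSketch {
    // Compare an explicit schema against the base table's schema,
    // requiring matching column names (case-insensitive) and types.
    // This is one plausible notion of "compatible"; the rule used by
    // Utils.getSchemaAndPlanFromBase may differ.
    def checkSchemaCompatible(explicit: StructType, base: StructType,
        table: String): StructType = {
      val baseFields = base.fields
          .map(f => f.name.toLowerCase -> f.dataType).toMap
      explicit.fields.foreach { f =>
        baseFields.get(f.name.toLowerCase) match {
          case Some(dt) if dt == f.dataType => // compatible column
          case Some(dt) => throw new IllegalArgumentException(
            s"Type mismatch for column ${f.name} of $table: ${f.dataType} vs $dt")
          case None => throw new IllegalArgumentException(
            s"Column ${f.name} of $table not found in base table")
        }
      }
      explicit
    }
  }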
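The next sketch shows, under stated assumptions, how a time-interval value might be parsed to milliseconds in the spirit of parseTimeInterval. The regex below is an assumed stand-in for timeIntervalSpec, whose actual definition is not shown in this listing; the real set of accepted units and forms may differ.

  object TimeIntervalSketch {
    // Assumed stand-in for Utils.timeIntervalSpec (hypothetical).
    private val intervalSpec = "([0-9]+)(ms|s|m|h)".r

    def parseTimeInterval(optV: Any, module: String): Long = optV match {
      // Plain numbers are taken to already be milliseconds.
      case n: Long => n
      case n: Int => n.toLong
      case s: String => s match {
        case intervalSpec(num, unit) =>
          val v = num.toLong
          unit match {
            case "ms" => v
            case "s" => v * 1000L
            case "m" => v * 60000L
            case "h" => v * 3600000L
          }
        case _ => throw new IllegalArgumentException(
          s"$module: cannot parse time interval '$s'")
      }
      case v => throw new IllegalArgumentException(
        s"$module: unexpected time interval value $v")
    }
  }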
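Finally, a sketch of the contract suggested by registerDriverUrl: infer a driver class name from the JDBC URL (as getDriverClassName does), load it so that it registers itself with java.sql.DriverManager, and return the class name. The URL-to-driver mapping here is an illustrative subset, not the mapping actually used by Utils.

  object DriverRegistrationSketch {
    def registerDriverUrl(url: String): String = {
      // Illustrative mapping only; the real getDriverClassName may
      // recognize many more URL schemes.
      val driver =
        if (url.startsWith("jdbc:postgresql:")) "org.postgresql.Driver"
        else if (url.startsWith("jdbc:mysql:")) "com.mysql.cj.jdbc.Driver"
        else throw new IllegalArgumentException(s"No driver known for $url")
      // Loading the class runs its static initializer, which registers
      // the driver with java.sql.DriverManager per the JDBC SPI.
      Class.forName(driver)
      driver
    }
  }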

Inherited from AnyRef

Inherited from Any
