
org.apache.hadoop.hbase.spark

DefaultSourceStaticUtils

object DefaultSourceStaticUtils

Status object that stores static functions and also holds the last executed query information, which can be used for unit testing.

Annotations
@Private()
Linear Supertypes
AnyRef, Any

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##(): Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  5. val byteRange: ThreadLocal[PositionedByteRange]
  6. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
  7. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  8. def equals(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  9. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  10. def getByteValue(field: Field, value: String): Array[Byte]

    Converts a value from SparkSQL into the byte encoding used to store it in HBase, using the correct byte type for the column.

    field

    The structure of the SparkSQL column

    value

    String value from SparkSQL

    returns

    The byte array to be stored in HBase
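
    Example (a minimal sketch; `field` is assumed to be an in-scope Field, obtained for instance from a parsed HBaseTableCatalog describing an IntegerType column — it is illustrative, not part of this API page):

      import org.apache.hadoop.hbase.spark.DefaultSourceStaticUtils

      // Encode the SparkSQL string "42" into the byte representation HBase
      // stores for the column described by `field` (assumed in scope).
      val bytes: Array[Byte] = DefaultSourceStaticUtils.getByteValue(field, "42")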

  11. final def getClass(): Class[_]
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  12. def getFreshByteRange(bytes: Array[Byte], offset: Int = 0, length: Int): PositionedByteRange
  13. def getFreshByteRange(bytes: Array[Byte]): PositionedByteRange
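
    Example (a minimal sketch of wrapping a byte array in a PositionedByteRange; the range appears to reuse the thread-local byteRange value above — an assumption, so the result should not be cached across calls):

      import org.apache.hadoop.hbase.spark.DefaultSourceStaticUtils
      import org.apache.hadoop.hbase.util.Bytes

      val bytes = Bytes.toBytes(12345L)
      // Wrap the entire array.
      val full = DefaultSourceStaticUtils.getFreshByteRange(bytes)
      // Wrap only the first four bytes, starting at offset 0.
      val slice = DefaultSourceStaticUtils.getFreshByteRange(bytes, 0, 4)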
  14. def getValue(field: Field, r: Result): Any

    Converts the result content from HBase into the SQL value type requested by the Spark SQL schema definition.

    field

    The structure of the SparkSQL column

    r

    The Result object from HBase

    returns

    The converted object type
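
    Example (a minimal sketch; `table` is assumed to be an open HBase Table, `rowKey` a byte-array row key, and `field` a Field describing a LongType column — all three names are illustrative assumptions):

      import org.apache.hadoop.hbase.client.Get
      import org.apache.hadoop.hbase.spark.DefaultSourceStaticUtils

      val result = table.get(new Get(rowKey))
      // Convert the HBase cell bytes into the SQL type declared in the schema.
      val sqlValue: Any = DefaultSourceStaticUtils.getValue(field, result)
      // For a LongType column the returned value can be cast back to Long.
      val asLong = sqlValue.asInstanceOf[Long]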

  15. def hashCode(): Int
    Definition Classes
    AnyRef → Any
    Annotations
    @native()
  16. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  17. val lastFiveExecutionRules: ConcurrentLinkedQueue[ExecutionRuleForUnitTesting]
  18. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  19. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  20. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @native()
  21. def populateLatestExecutionRules(rowKeyFilter: RowKeyFilter, dynamicLogicExpression: DynamicLogicExpression): Unit

    Populates lastFiveExecutionRules for unit-testing purposes. This method is not thread safe.

    rowKeyFilter

    The RowKeyFilter logic used in the last query

    dynamicLogicExpression

    The DynamicLogicExpression used in the last query
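
    Example (a minimal sketch of the unit-testing pattern this method supports: the connector calls it internally after planning a query, and a test then polls lastFiveExecutionRules; `sqlContext`, the registered table name `hbaseTable`, and the `rowKeyFilter`/`dynamicLogicExpression` accessors on the polled entry are assumptions for illustration):

      import org.apache.hadoop.hbase.spark.DefaultSourceStaticUtils

      sqlContext.sql("SELECT * FROM hbaseTable WHERE KEY_FIELD = 'row1'").collect()

      // Poll the most recent ExecutionRuleForUnitTesting entry and assert on
      // the recorded rowkey filter and dynamic logic expression.
      val rule = DefaultSourceStaticUtils.lastFiveExecutionRules.poll()
      assert(rule.rowKeyFilter != null)
      assert(rule.dynamicLogicExpression != null)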

  22. val rawDouble: RawDouble
  23. val rawFloat: RawFloat
  24. val rawInteger: RawInteger
  25. val rawLong: RawLong
  26. val rawString: RawString
  27. final def synchronized[T0](arg0: ⇒ T0): T0
    Definition Classes
    AnyRef
  28. def toString(): String
    Definition Classes
    AnyRef → Any
  29. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  30. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  31. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws( ... ) @native()
