object SchemaHelper
- Alphabetic
- By Inheritance
- SchemaHelper
- AnyRef
- Any
- Hide All
- Show All
- Public
- All
Value Members
-
final
def
!=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
def
##(): Int
- Definition Classes
- AnyRef → Any
-
final
def
==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
final
val
DAY_TIME_INTERVAL: String("DayTimeIntervalType")
Spark 3.2.0 DataType DayTimeIntervalType's class name.
-
final
val
TIMESTAMP_NTZ: String("TimestampNTZType$")
Spark 3.4.0 DataType TimestampNTZType's class name.
-
final
val
YEAR_MONTH_INTERVAL: String("YearMonthIntervalType")
Spark 3.2.0 DataType YearMonthIntervalType's class name.
-
final
def
asInstanceOf[T0]: T0
- Definition Classes
- Any
-
def
clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()
-
final
def
eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
def
equals(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
-
def
finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws( classOf[java.lang.Throwable] )
-
final
def
getClass(): Class[_]
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
def
getColumnSize(sparkType: DataType): Option[Int]
For boolean, numeric and datetime types, it returns the default size of its catalyst type. For struct type, when its elements are fixed-size, the summation of all element sizes will be returned.
For boolean, numeric and datetime types, it returns the default size of its catalyst type. For struct type, when its elements are fixed-size, the summation of all element sizes will be returned. For array, map, string, and binaries, the column size is variable; return null as unknown.
-
def
getDecimalDigits(sparkType: DataType): Option[Int]
The number of fractional digits for this type.
The number of fractional digits for this type. Null is returned for data types where this is not applicable. For boolean and integrals, the decimal digits is 0. For floating types, we follow the IEEE Standard for Floating-Point Arithmetic (IEEE 754). For timestamp values, we support microseconds. For decimals, it returns the scale.
- def getNumPrecRadix(typ: DataType): Option[Int]
-
def
hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @native()
-
final
def
isInstanceOf[T0]: Boolean
- Definition Classes
- Any
-
final
def
ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
-
final
def
notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
def
notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @native()
-
final
def
synchronized[T0](arg0: ⇒ T0): T0
- Definition Classes
- AnyRef
- def toJavaSQLType(sparkType: DataType): Int
-
def
toString(): String
- Definition Classes
- AnyRef → Any
- def toTColumnDesc(field: StructField, pos: Int, timeZone: String): TColumnDesc
- def toTTableSchema(schema: StructType, timeZone: String): TTableSchema
- def toTTypeDesc(typ: DataType, timeZone: String): TTypeDesc
- def toTTypeId(typ: DataType): TTypeId
- def toTTypeQualifiers(typ: DataType, timeZone: String): TTypeQualifiers
-
final
def
wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... )
-
final
def
wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws( ... ) @native()