org.apache.spark.deploy.yarn

YarnSparkHadoopUtil

class YarnSparkHadoopUtil extends SparkHadoopUtil

Contains utility methods to interact with Hadoop from Spark.

Linear Supertypes
SparkHadoopUtil, Logging, AnyRef, Any
Ordering
  1. Alphabetic
  2. By inheritance
Inherited
  1. YarnSparkHadoopUtil
  2. SparkHadoopUtil
  3. Logging
  4. AnyRef
  5. Any
  1. Hide All
  2. Show all
Learn more about member selection
Visibility
  1. Public
  2. All

Instance Constructors

  1. new YarnSparkHadoopUtil()

Value Members

  1. final def !=(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  2. final def !=(arg0: Any): Boolean

    Definition Classes
    Any
  3. final def ##(): Int

    Definition Classes
    AnyRef → Any
  4. final def ==(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  5. final def ==(arg0: Any): Boolean

    Definition Classes
    Any
  6. def addCredentials(conf: JobConf): Unit

    Definition Classes
    YarnSparkHadoopUtil → SparkHadoopUtil
  7. def addCurrentUserCredentials(creds: Credentials): Unit

    Definition Classes
    YarnSparkHadoopUtil → SparkHadoopUtil
  8. def addSecretKeyToUserCredentials(key: String, secret: String): Unit

    Definition Classes
    YarnSparkHadoopUtil → SparkHadoopUtil
  9. def appendS3AndSparkHadoopConfigurations(conf: SparkConf, hadoopConf: Configuration): Unit

    Definition Classes
    SparkHadoopUtil
  10. final def asInstanceOf[T0]: T0

    Definition Classes
    Any
  11. def clone(): AnyRef

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  12. val conf: Configuration

    Definition Classes
    SparkHadoopUtil
  13. final def eq(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  14. def equals(arg0: Any): Boolean

    Definition Classes
    AnyRef → Any
  15. def finalize(): Unit

    Attributes
    protected[java.lang]
    Definition Classes
    AnyRef
    Annotations
    @throws( classOf[java.lang.Throwable] )
  16. final def getClass(): Class[_]

    Definition Classes
    AnyRef → Any
  17. def getCurrentUserCredentials(): Credentials

    Definition Classes
    YarnSparkHadoopUtil → SparkHadoopUtil
  18. def getNameNodesToAccess(sparkConf: SparkConf): Set[Path]

    Get the list of namenodes the user may access.

  19. def getSecretKeyFromUserCredentials(key: String): Array[Byte]

    Definition Classes
    YarnSparkHadoopUtil → SparkHadoopUtil
  20. def getTimeFromNowToRenewal(sparkConf: SparkConf, fraction: Double, credentials: Credentials): Long

    Definition Classes
    SparkHadoopUtil
  21. def getTokenRenewer(conf: Configuration): String

  22. def globPath(pattern: Path): Seq[Path]

    Definition Classes
    SparkHadoopUtil
  23. def globPathIfNecessary(pattern: Path): Seq[Path]

    Definition Classes
    SparkHadoopUtil
  24. def hashCode(): Int

    Definition Classes
    AnyRef → Any
  25. def initializeLogIfNecessary(isInterpreter: Boolean): Unit

    Attributes
    protected
    Definition Classes
    Logging
  26. def isGlobPath(pattern: Path): Boolean

    Definition Classes
    SparkHadoopUtil
  27. final def isInstanceOf[T0]: Boolean

    Definition Classes
    Any
  28. def isTraceEnabled(): Boolean

    Attributes
    protected
    Definition Classes
    Logging
  29. def isYarnMode(): Boolean

    Definition Classes
    YarnSparkHadoopUtil → SparkHadoopUtil
  30. def listFilesSorted(remoteFs: FileSystem, dir: Path, prefix: String, exclusionSuffix: String): Array[FileStatus]

    Definition Classes
    SparkHadoopUtil
  31. def listLeafDirStatuses(fs: FileSystem, baseStatus: FileStatus): Seq[FileStatus]

    Definition Classes
    SparkHadoopUtil
  32. def listLeafDirStatuses(fs: FileSystem, basePath: Path): Seq[FileStatus]

    Definition Classes
    SparkHadoopUtil
  33. def listLeafStatuses(fs: FileSystem, baseStatus: FileStatus): Seq[FileStatus]

    Definition Classes
    SparkHadoopUtil
  34. def listLeafStatuses(fs: FileSystem, basePath: Path): Seq[FileStatus]

    Definition Classes
    SparkHadoopUtil
  35. def log: Logger

    Attributes
    protected
    Definition Classes
    Logging
  36. def logDebug(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  37. def logDebug(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  38. def logError(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  39. def logError(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  40. def logInfo(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  41. def logInfo(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  42. def logName: String

    Attributes
    protected
    Definition Classes
    Logging
  43. def logTrace(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  44. def logTrace(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  45. def logWarning(msg: ⇒ String, throwable: Throwable): Unit

    Attributes
    protected
    Definition Classes
    Logging
  46. def logWarning(msg: ⇒ String): Unit

    Attributes
    protected
    Definition Classes
    Logging
  47. def loginUserFromKeytab(principalName: String, keytabFilename: String): Unit

    Definition Classes
    SparkHadoopUtil
  48. final def ne(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  49. def newConfiguration(conf: SparkConf): Configuration

    Definition Classes
    YarnSparkHadoopUtil → SparkHadoopUtil
  50. final def notify(): Unit

    Definition Classes
    AnyRef
  51. final def notifyAll(): Unit

    Definition Classes
    AnyRef
  52. def obtainTokenForHBase(conf: Configuration): Option[Token[TokenIdentifier]]

    Obtain a security token for HBase.

    Obtain a security token for HBase.

    Requirements

    1. "hbase.security.authentication" == "kerberos"
    2. The HBase classes HBaseConfiguration and TokenUtil could be loaded and invoked.

    conf

    Hadoop configuration; an HBase configuration is created from this.

    returns

    a token if the requirements were met, None if not.

  53. def obtainTokenForHBase(sparkConf: SparkConf, conf: Configuration, credentials: Credentials): Unit

    Obtain a security token for HBase.

  54. def obtainTokenForHBaseInner(conf: Configuration): Option[Token[TokenIdentifier]]

    Obtain a security token for HBase if "hbase.security.authentication" == "kerberos"

    Obtain a security token for HBase if "hbase.security.authentication" == "kerberos"

    conf

    Hadoop configuration; an HBase configuration is created from this.

    returns

    a token if one was needed

  55. def obtainTokenForHiveMetastore(conf: Configuration): Option[Token[DelegationTokenIdentifier]]

    Obtains token for the Hive metastore, using the current user as the principal.

    Obtains token for the Hive metastore, using the current user as the principal. Some exceptions are caught and downgraded to a log message.

    conf

    Hadoop configuration; the Hive configuration will be based on this

    returns

    a token, or None if there's no need for a token (no metastore URI or principal in the config), or if a binding exception was caught and downgraded.

  56. def obtainTokenForHiveMetastore(sparkConf: SparkConf, conf: Configuration, credentials: Credentials): Unit

    Obtains token for the Hive metastore and adds them to the credentials.

  57. def obtainTokensForNamenodes(paths: Set[Path], conf: Configuration, creds: Credentials, renewer: Option[String] = None): Unit

    Obtains tokens for the namenodes passed in and adds them to the credentials.

  58. def runAsSparkUser(func: () ⇒ Unit): Unit

    Definition Classes
    SparkHadoopUtil
  59. def substituteHadoopVariables(text: String, hadoopConf: Configuration): String

    Definition Classes
    SparkHadoopUtil
  60. final def synchronized[T0](arg0: ⇒ T0): T0

    Definition Classes
    AnyRef
  61. def toString(): String

    Definition Classes
    AnyRef → Any
  62. def transferCredentials(source: UserGroupInformation, dest: UserGroupInformation): Unit

    Definition Classes
    YarnSparkHadoopUtil → SparkHadoopUtil
  63. final def wait(): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  64. final def wait(arg0: Long, arg1: Int): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )
  65. final def wait(arg0: Long): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws( ... )

Inherited from SparkHadoopUtil

Inherited from Logging

Inherited from AnyRef

Inherited from Any

Ungrouped