Packages

final class WriteOperation extends GeneratedMessage with WriteOperationOrBuilder

As writes are not directly handled during analysis and planning, they are modeled as commands.

Protobuf type spark.connect.WriteOperation

Linear Supertypes
WriteOperationOrBuilder, GeneratedMessage, Serializable, AbstractMessage, Message, MessageOrBuilder, AbstractMessageLite[MessageType, BuilderType], MessageLite, MessageLiteOrBuilder, AnyRef, Any
Ordering
  1. Alphabetic
  2. By Inheritance
Inherited
  1. WriteOperation
  2. WriteOperationOrBuilder
  3. GeneratedMessage
  4. Serializable
  5. AbstractMessage
  6. Message
  7. MessageOrBuilder
  8. AbstractMessageLite
  9. MessageLite
  10. MessageLiteOrBuilder
  11. AnyRef
  12. Any
  1. Hide All
  2. Show All
Visibility
  1. Public
  2. Protected

Value Members

  1. final def !=(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  2. final def ##: Int
    Definition Classes
    AnyRef → Any
  3. final def ==(arg0: Any): Boolean
    Definition Classes
    AnyRef → Any
  4. final def asInstanceOf[T0]: T0
    Definition Classes
    Any
  5. def clone(): AnyRef
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.CloneNotSupportedException]) @IntrinsicCandidate() @native()
  6. def containsOptions(key: String): Boolean

    (Optional) A list of configuration options.
    

    (Optional) A list of configuration options.
    

    map<string, string> options = 9;

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
    Annotations
    @Override()
  7. final def eq(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  8. def equals(obj: AnyRef): Boolean
    Definition Classes
    WriteOperation → AbstractMessage → Message → AnyRef → Any
    Annotations
    @Override()
  9. def findInitializationErrors(): List[String]
    Definition Classes
    AbstractMessage → MessageOrBuilder
  10. def getAllFields(): Map[FieldDescriptor, AnyRef]
    Definition Classes
    GeneratedMessage → MessageOrBuilder
  11. def getBucketBy(): BucketBy

    (Optional) Bucketing specification. Bucketing must set the number of buckets and the columns
    to bucket by.
    

    (Optional) Bucketing specification. Bucketing must set the number of buckets and the columns
    to bucket by.
    

    .spark.connect.WriteOperation.BucketBy bucket_by = 8;

    returns

    The bucketBy.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
    Annotations
    @Override()
  12. def getBucketByOrBuilder(): BucketByOrBuilder

    (Optional) Bucketing specification. Bucketing must set the number of buckets and the columns
    to bucket by.
    

    (Optional) Bucketing specification. Bucketing must set the number of buckets and the columns
    to bucket by.
    

    .spark.connect.WriteOperation.BucketBy bucket_by = 8;

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
    Annotations
    @Override()
  13. final def getClass(): Class[_ <: AnyRef]
    Definition Classes
    AnyRef → Any
    Annotations
    @IntrinsicCandidate() @native()
  14. def getClusteringColumns(index: Int): String

    (Optional) Columns used for clustering the table.
    

    (Optional) Columns used for clustering the table.
    

    repeated string clustering_columns = 10;

    index

    The index of the element to return.

    returns

    The clusteringColumns at the given index.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
  15. def getClusteringColumnsBytes(index: Int): ByteString

    (Optional) Columns used for clustering the table.
    

    (Optional) Columns used for clustering the table.
    

    repeated string clustering_columns = 10;

    index

    The index of the value to return.

    returns

    The bytes of the clusteringColumns at the given index.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
  16. def getClusteringColumnsCount(): Int

    (Optional) Columns used for clustering the table.
    

    (Optional) Columns used for clustering the table.
    

    repeated string clustering_columns = 10;

    returns

    The count of clusteringColumns.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
  17. def getClusteringColumnsList(): ProtocolStringList

    (Optional) Columns used for clustering the table.
    

    (Optional) Columns used for clustering the table.
    

    repeated string clustering_columns = 10;

    returns

    A list containing the clusteringColumns.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
  18. def getDefaultInstanceForType(): WriteOperation
    Definition Classes
    WriteOperation → MessageOrBuilder → MessageLiteOrBuilder
    Annotations
    @Override()
  19. def getDescriptorForType(): Descriptor
    Definition Classes
    GeneratedMessage → MessageOrBuilder
  20. def getField(field: FieldDescriptor): AnyRef
    Definition Classes
    GeneratedMessage → MessageOrBuilder
  21. def getInitializationErrorString(): String
    Definition Classes
    AbstractMessage → MessageOrBuilder
  22. def getInput(): Relation

    (Required) The output of the `input` relation will be persisted according to the options.
    

    (Required) The output of the `input` relation will be persisted according to the options.
    

    .spark.connect.Relation input = 1;

    returns

    The input.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
    Annotations
    @Override()
  23. def getInputOrBuilder(): RelationOrBuilder

    (Required) The output of the `input` relation will be persisted according to the options.
    

    (Required) The output of the `input` relation will be persisted according to the options.
    

    .spark.connect.Relation input = 1;

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
    Annotations
    @Override()
  24. def getMode(): SaveMode

    (Required) the save mode.
    

    (Required) the save mode.
    

    .spark.connect.WriteOperation.SaveMode mode = 5;

    returns

    The mode.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
    Annotations
    @Override()
  25. def getModeValue(): Int

    (Required) the save mode.
    

    (Required) the save mode.
    

    .spark.connect.WriteOperation.SaveMode mode = 5;

    returns

    The enum numeric value on the wire for mode.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
    Annotations
    @Override()
  26. def getOneofFieldDescriptor(oneof: OneofDescriptor): FieldDescriptor
    Definition Classes
    GeneratedMessage → AbstractMessage → MessageOrBuilder
  27. def getOptionsCount(): Int

    (Optional) A list of configuration options.
    

    (Optional) A list of configuration options.
    

    map<string, string> options = 9;

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
  28. def getOptionsMap(): Map[String, String]

    (Optional) A list of configuration options.
    

    (Optional) A list of configuration options.
    

    map<string, string> options = 9;

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
    Annotations
    @Override()
  29. def getOptionsOrDefault(key: String, defaultValue: String): String

    (Optional) A list of configuration options.
    

    (Optional) A list of configuration options.
    

    map<string, string> options = 9;

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
    Annotations
    @Override()
  30. def getOptionsOrThrow(key: String): String

    (Optional) A list of configuration options.
    

    (Optional) A list of configuration options.
    

    map<string, string> options = 9;

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
    Annotations
    @Override()
  31. def getParserForType(): Parser[WriteOperation]
    Definition Classes
    WriteOperation → GeneratedMessage → Message → MessageLite
    Annotations
    @Override()
  32. def getPartitioningColumns(index: Int): String

    (Optional) List of columns for partitioning.
    

    (Optional) List of columns for partitioning.
    

    repeated string partitioning_columns = 7;

    index

    The index of the element to return.

    returns

    The partitioningColumns at the given index.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
  33. def getPartitioningColumnsBytes(index: Int): ByteString

    (Optional) List of columns for partitioning.
    

    (Optional) List of columns for partitioning.
    

    repeated string partitioning_columns = 7;

    index

    The index of the value to return.

    returns

    The bytes of the partitioningColumns at the given index.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
  34. def getPartitioningColumnsCount(): Int

    (Optional) List of columns for partitioning.
    

    (Optional) List of columns for partitioning.
    

    repeated string partitioning_columns = 7;

    returns

    The count of partitioningColumns.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
  35. def getPartitioningColumnsList(): ProtocolStringList

    (Optional) List of columns for partitioning.
    

    (Optional) List of columns for partitioning.
    

    repeated string partitioning_columns = 7;

    returns

    A list containing the partitioningColumns.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
  36. def getPath(): String

    string path = 3;

    string path = 3;

    returns

    The path.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
  37. def getPathBytes(): ByteString

    string path = 3;

    string path = 3;

    returns

    The bytes for path.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
  38. def getRepeatedField(field: FieldDescriptor, index: Int): AnyRef
    Definition Classes
    GeneratedMessage → MessageOrBuilder
  39. def getRepeatedFieldCount(field: FieldDescriptor): Int
    Definition Classes
    GeneratedMessage → MessageOrBuilder
  40. def getSaveTypeCase(): SaveTypeCase
  41. def getSerializedSize(): Int
    Definition Classes
    WriteOperation → GeneratedMessage → AbstractMessage → MessageLite
    Annotations
    @Override()
  42. def getSortColumnNames(index: Int): String

    (Optional) List of columns to sort the output by.
    

    (Optional) List of columns to sort the output by.
    

    repeated string sort_column_names = 6;

    index

    The index of the element to return.

    returns

    The sortColumnNames at the given index.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
  43. def getSortColumnNamesBytes(index: Int): ByteString

    (Optional) List of columns to sort the output by.
    

    (Optional) List of columns to sort the output by.
    

    repeated string sort_column_names = 6;

    index

    The index of the value to return.

    returns

    The bytes of the sortColumnNames at the given index.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
  44. def getSortColumnNamesCount(): Int

    (Optional) List of columns to sort the output by.
    

    (Optional) List of columns to sort the output by.
    

    repeated string sort_column_names = 6;

    returns

    The count of sortColumnNames.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
  45. def getSortColumnNamesList(): ProtocolStringList

    (Optional) List of columns to sort the output by.
    

    (Optional) List of columns to sort the output by.
    

    repeated string sort_column_names = 6;

    returns

    A list containing the sortColumnNames.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
  46. def getSource(): String

    (Optional) Format value according to the Spark documentation. Examples are: text, parquet, delta.
    

    (Optional) Format value according to the Spark documentation. Examples are: text, parquet, delta.
    

    optional string source = 2;

    returns

    The source.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
    Annotations
    @Override()
  47. def getSourceBytes(): ByteString

    (Optional) Format value according to the Spark documentation. Examples are: text, parquet, delta.
    

    (Optional) Format value according to the Spark documentation. Examples are: text, parquet, delta.
    

    optional string source = 2;

    returns

    The bytes for source.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
    Annotations
    @Override()
  48. def getTable(): SaveTable

    .spark.connect.WriteOperation.SaveTable table = 4;

    .spark.connect.WriteOperation.SaveTable table = 4;

    returns

    The table.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
    Annotations
    @Override()
  49. def getTableOrBuilder(): SaveTableOrBuilder

    .spark.connect.WriteOperation.SaveTable table = 4;

    .spark.connect.WriteOperation.SaveTable table = 4;

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
    Annotations
    @Override()
  50. def getUnknownFields(): UnknownFieldSet
    Definition Classes
    GeneratedMessage → MessageOrBuilder
  51. def hasBucketBy(): Boolean

    (Optional) Bucketing specification. Bucketing must set the number of buckets and the columns
    to bucket by.
    

    (Optional) Bucketing specification. Bucketing must set the number of buckets and the columns
    to bucket by.
    

    .spark.connect.WriteOperation.BucketBy bucket_by = 8;

    returns

    Whether the bucketBy field is set.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
    Annotations
    @Override()
  52. def hasField(field: FieldDescriptor): Boolean
    Definition Classes
    GeneratedMessage → MessageOrBuilder
  53. def hasInput(): Boolean

    (Required) The output of the `input` relation will be persisted according to the options.
    

    (Required) The output of the `input` relation will be persisted according to the options.
    

    .spark.connect.Relation input = 1;

    returns

    Whether the input field is set.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
    Annotations
    @Override()
  54. def hasOneof(oneof: OneofDescriptor): Boolean
    Definition Classes
    GeneratedMessage → AbstractMessage → MessageOrBuilder
  55. def hasPath(): Boolean

    string path = 3;

    string path = 3;

    returns

    Whether the path field is set.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
  56. def hasSource(): Boolean

    (Optional) Format value according to the Spark documentation. Examples are: text, parquet, delta.
    

    (Optional) Format value according to the Spark documentation. Examples are: text, parquet, delta.
    

    optional string source = 2;

    returns

    Whether the source field is set.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
    Annotations
    @Override()
  57. def hasTable(): Boolean

    .spark.connect.WriteOperation.SaveTable table = 4;

    .spark.connect.WriteOperation.SaveTable table = 4;

    returns

    Whether the table field is set.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
    Annotations
    @Override()
  58. def hashCode(): Int
    Definition Classes
    WriteOperation → AbstractMessage → Message → AnyRef → Any
    Annotations
    @Override()
  59. def internalGetFieldAccessorTable(): FieldAccessorTable
    Attributes
    protected[proto]
    Definition Classes
    WriteOperation → GeneratedMessage
    Annotations
    @Override()
  60. def internalGetMapFieldReflection(number: Int): MapFieldReflectionAccessor
    Attributes
    protected[proto]
    Definition Classes
    WriteOperation → GeneratedMessage
    Annotations
    @SuppressWarnings() @Override()
  61. final def isInitialized(): Boolean
    Definition Classes
    WriteOperation → GeneratedMessage → AbstractMessage → MessageLiteOrBuilder
    Annotations
    @Override()
  62. final def isInstanceOf[T0]: Boolean
    Definition Classes
    Any
  63. final def ne(arg0: AnyRef): Boolean
    Definition Classes
    AnyRef
  64. def newBuilderForType(parent: BuilderParent): Builder
    Attributes
    protected[proto]
    Definition Classes
    WriteOperation → AbstractMessage
    Annotations
    @Override()
  65. def newBuilderForType(): Builder
    Definition Classes
    WriteOperation → Message → MessageLite
    Annotations
    @Override()
  66. def newInstance(unused: UnusedPrivateParameter): AnyRef
    Attributes
    protected[protobuf]
    Definition Classes
    GeneratedMessage
  67. final def notify(): Unit
    Definition Classes
    AnyRef
    Annotations
    @IntrinsicCandidate() @native()
  68. final def notifyAll(): Unit
    Definition Classes
    AnyRef
    Annotations
    @IntrinsicCandidate() @native()
  69. def parseUnknownField(input: CodedInputStream, unknownFields: Builder, extensionRegistry: ExtensionRegistryLite, tag: Int): Boolean
    Attributes
    protected[protobuf]
    Definition Classes
    GeneratedMessage
    Annotations
    @throws(classOf[java.io.IOException])
  70. def parseUnknownFieldProto3(input: CodedInputStream, unknownFields: Builder, extensionRegistry: ExtensionRegistryLite, tag: Int): Boolean
    Attributes
    protected[protobuf]
    Definition Classes
    GeneratedMessage
    Annotations
    @throws(classOf[java.io.IOException])
  71. final def synchronized[T0](arg0: => T0): T0
    Definition Classes
    AnyRef
  72. def toBuilder(): Builder
    Definition Classes
    WriteOperation → Message → MessageLite
    Annotations
    @Override()
  73. def toByteArray(): Array[Byte]
    Definition Classes
    AbstractMessageLite → MessageLite
  74. def toByteString(): ByteString
    Definition Classes
    AbstractMessageLite → MessageLite
  75. final def toString(): String
    Definition Classes
    AbstractMessage → Message → AnyRef → Any
  76. final def wait(arg0: Long, arg1: Int): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException])
  77. final def wait(arg0: Long): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException]) @native()
  78. final def wait(): Unit
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.InterruptedException])
  79. def writeDelimitedTo(output: OutputStream): Unit
    Definition Classes
    AbstractMessageLite → MessageLite
    Annotations
    @throws(classOf[java.io.IOException])
  80. def writeReplace(): AnyRef
    Attributes
    protected[protobuf]
    Definition Classes
    GeneratedMessage
    Annotations
    @throws(classOf[java.io.ObjectStreamException])
  81. def writeTo(output: CodedOutputStream): Unit
    Definition Classes
    WriteOperation → GeneratedMessage → AbstractMessage → MessageLite
    Annotations
    @Override()
  82. def writeTo(output: OutputStream): Unit
    Definition Classes
    AbstractMessageLite → MessageLite
    Annotations
    @throws(classOf[java.io.IOException])

Deprecated Value Members

  1. def finalize(): Unit
    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws(classOf[java.lang.Throwable]) @Deprecated
    Deprecated

    (Since version 9)

  2. def getOptions(): Map[String, String]

    Use #getOptionsMap() instead.

    Use #getOptionsMap() instead.

    Definition Classes
    WriteOperation → WriteOperationOrBuilder
    Annotations
    @Override() @Deprecated
    Deprecated
  3. def internalGetMapField(fieldNumber: Int): MapField[_ <: AnyRef, _ <: AnyRef]
    Attributes
    protected[protobuf]
    Definition Classes
    GeneratedMessage
    Annotations
    @Deprecated
    Deprecated
  4. def mergeFromAndMakeImmutableInternal(input: CodedInputStream, extensionRegistry: ExtensionRegistryLite): Unit
    Attributes
    protected[protobuf]
    Definition Classes
    GeneratedMessage
    Annotations
    @throws(classOf[com.google.protobuf.InvalidProtocolBufferException]) @Deprecated
    Deprecated

Inherited from GeneratedMessage

Inherited from Serializable

Inherited from AbstractMessage

Inherited from Message

Inherited from MessageOrBuilder

Inherited from AbstractMessageLite[MessageType, BuilderType]

Inherited from MessageLite

Inherited from MessageLiteOrBuilder

Inherited from AnyRef

Inherited from Any

Ungrouped