trait WriteOperationV2OrBuilder extends MessageOrBuilder
- Alphabetic
- By Inheritance
- WriteOperationV2OrBuilder
- MessageOrBuilder
- MessageLiteOrBuilder
- AnyRef
- Any
- Hide All
- Show All
- Public
- Protected
Abstract Value Members
- abstract def containsOptions(key: String): Boolean
(Optional) A list of configuration options.
map<string, string> options = 5;
- abstract def containsTableProperties(key: String): Boolean
(Optional) A list of table properties.
map<string, string> table_properties = 6;
- abstract def findInitializationErrors(): List[String]
- Definition Classes
- MessageOrBuilder
- abstract def getAllFields(): Map[FieldDescriptor, AnyRef]
- Definition Classes
- MessageOrBuilder
- abstract def getClusteringColumns(index: Int): String
(Optional) Columns used for clustering the table.
repeated string clustering_columns = 9;
- index
The index of the element to return.
- returns
The clusteringColumns at the given index.
- abstract def getClusteringColumnsBytes(index: Int): ByteString
(Optional) Columns used for clustering the table.
repeated string clustering_columns = 9;
- index
The index of the value to return.
- returns
The bytes of the clusteringColumns at the given index.
- abstract def getClusteringColumnsCount(): Int
(Optional) Columns used for clustering the table.
repeated string clustering_columns = 9;
- returns
The count of clusteringColumns.
- abstract def getClusteringColumnsList(): List[String]
(Optional) Columns used for clustering the table.
repeated string clustering_columns = 9;
- returns
A list containing the clusteringColumns.
- abstract def getDefaultInstanceForType(): Message
- Definition Classes
- MessageOrBuilder → MessageLiteOrBuilder
- abstract def getDescriptorForType(): Descriptor
- Definition Classes
- MessageOrBuilder
- abstract def getField(field: FieldDescriptor): AnyRef
- Definition Classes
- MessageOrBuilder
- abstract def getInitializationErrorString(): String
- Definition Classes
- MessageOrBuilder
- abstract def getInput(): Relation
(Required) The output of the `input` relation will be persisted according to the options.
.spark.connect.Relation input = 1;
- returns
The input.
- abstract def getInputOrBuilder(): RelationOrBuilder
(Required) The output of the `input` relation will be persisted according to the options.
.spark.connect.Relation input = 1;
- abstract def getMode(): Mode
(Required) Write mode.
.spark.connect.WriteOperationV2.Mode mode = 7;
- returns
The mode.
- abstract def getModeValue(): Int
(Required) Write mode.
.spark.connect.WriteOperationV2.Mode mode = 7;
- returns
The enum numeric value on the wire for mode.
- abstract def getOneofFieldDescriptor(oneof: OneofDescriptor): FieldDescriptor
- Definition Classes
- MessageOrBuilder
- abstract def getOptionsCount(): Int
(Optional) A list of configuration options.
map<string, string> options = 5;
- abstract def getOptionsMap(): Map[String, String]
(Optional) A list of configuration options.
map<string, string> options = 5;
- abstract def getOptionsOrDefault(key: String, defaultValue: String): String
(Optional) A list of configuration options.
map<string, string> options = 5;
- abstract def getOptionsOrThrow(key: String): String
(Optional) A list of configuration options.
map<string, string> options = 5;
- abstract def getOverwriteCondition(): Expression
(Optional) A condition for overwrite saving mode.
.spark.connect.Expression overwrite_condition = 8;
- returns
The overwriteCondition.
- abstract def getOverwriteConditionOrBuilder(): ExpressionOrBuilder
(Optional) A condition for overwrite saving mode.
.spark.connect.Expression overwrite_condition = 8;
- abstract def getPartitioningColumns(index: Int): Expression
(Optional) List of columns for partitioning for output table created by `create`, `createOrReplace`, or `replace`.
repeated .spark.connect.Expression partitioning_columns = 4;
- abstract def getPartitioningColumnsCount(): Int
(Optional) List of columns for partitioning for output table created by `create`, `createOrReplace`, or `replace`.
repeated .spark.connect.Expression partitioning_columns = 4;
- abstract def getPartitioningColumnsList(): List[Expression]
(Optional) List of columns for partitioning for output table created by `create`, `createOrReplace`, or `replace`.
repeated .spark.connect.Expression partitioning_columns = 4;
- abstract def getPartitioningColumnsOrBuilder(index: Int): ExpressionOrBuilder
(Optional) List of columns for partitioning for output table created by `create`, `createOrReplace`, or `replace`.
repeated .spark.connect.Expression partitioning_columns = 4;
- abstract def getPartitioningColumnsOrBuilderList(): List[_ <: ExpressionOrBuilder]
(Optional) List of columns for partitioning for output table created by `create`, `createOrReplace`, or `replace`.
repeated .spark.connect.Expression partitioning_columns = 4;
- abstract def getProvider(): String
(Optional) A provider for the underlying output data source. Spark's default catalog supports "parquet", "json", etc.
optional string provider = 3;
- returns
The provider.
- abstract def getProviderBytes(): ByteString
(Optional) A provider for the underlying output data source. Spark's default catalog supports "parquet", "json", etc.
optional string provider = 3;
- returns
The bytes for provider.
- abstract def getRepeatedField(field: FieldDescriptor, index: Int): AnyRef
- Definition Classes
- MessageOrBuilder
- abstract def getRepeatedFieldCount(field: FieldDescriptor): Int
- Definition Classes
- MessageOrBuilder
- abstract def getTableName(): String
(Required) The destination of the write operation must be either a path or a table.
string table_name = 2;
- returns
The tableName.
- abstract def getTableNameBytes(): ByteString
(Required) The destination of the write operation must be either a path or a table.
string table_name = 2;
- returns
The bytes for tableName.
- abstract def getTablePropertiesCount(): Int
(Optional) A list of table properties.
map<string, string> table_properties = 6;
- abstract def getTablePropertiesMap(): Map[String, String]
(Optional) A list of table properties.
map<string, string> table_properties = 6;
- abstract def getTablePropertiesOrDefault(key: String, defaultValue: String): String
(Optional) A list of table properties.
map<string, string> table_properties = 6;
- abstract def getTablePropertiesOrThrow(key: String): String
(Optional) A list of table properties.
map<string, string> table_properties = 6;
- abstract def getUnknownFields(): UnknownFieldSet
- Definition Classes
- MessageOrBuilder
- abstract def hasField(field: FieldDescriptor): Boolean
- Definition Classes
- MessageOrBuilder
- abstract def hasInput(): Boolean
(Required) The output of the `input` relation will be persisted according to the options.
.spark.connect.Relation input = 1;
- returns
Whether the input field is set.
- abstract def hasOneof(oneof: OneofDescriptor): Boolean
- Definition Classes
- MessageOrBuilder
- abstract def hasOverwriteCondition(): Boolean
(Optional) A condition for overwrite saving mode.
.spark.connect.Expression overwrite_condition = 8;
- returns
Whether the overwriteCondition field is set.
- abstract def hasProvider(): Boolean
(Optional) A provider for the underlying output data source. Spark's default catalog supports "parquet", "json", etc.
optional string provider = 3;
- returns
Whether the provider field is set.
- abstract def isInitialized(): Boolean
- Definition Classes
- MessageLiteOrBuilder
- abstract def getOptions(): Map[String, String]
Deprecated. Use #getOptionsMap() instead.
- Annotations
- @Deprecated
- Deprecated
- abstract def getTableProperties(): Map[String, String]
Deprecated. Use #getTablePropertiesMap() instead.
- Annotations
- @Deprecated
- Deprecated
Concrete Value Members
- final def !=(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def ##: Int
- Definition Classes
- AnyRef → Any
- final def ==(arg0: Any): Boolean
- Definition Classes
- AnyRef → Any
- final def asInstanceOf[T0]: T0
- Definition Classes
- Any
- def clone(): AnyRef
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.CloneNotSupportedException]) @IntrinsicCandidate() @native()
- final def eq(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- def equals(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef → Any
- final def getClass(): Class[_ <: AnyRef]
- Definition Classes
- AnyRef → Any
- Annotations
- @IntrinsicCandidate() @native()
- def hashCode(): Int
- Definition Classes
- AnyRef → Any
- Annotations
- @IntrinsicCandidate() @native()
- final def isInstanceOf[T0]: Boolean
- Definition Classes
- Any
- final def ne(arg0: AnyRef): Boolean
- Definition Classes
- AnyRef
- final def notify(): Unit
- Definition Classes
- AnyRef
- Annotations
- @IntrinsicCandidate() @native()
- final def notifyAll(): Unit
- Definition Classes
- AnyRef
- Annotations
- @IntrinsicCandidate() @native()
- final def synchronized[T0](arg0: => T0): T0
- Definition Classes
- AnyRef
- def toString(): String
- Definition Classes
- AnyRef → Any
- final def wait(arg0: Long, arg1: Int): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
- final def wait(arg0: Long): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException]) @native()
- final def wait(): Unit
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.InterruptedException])
Deprecated Value Members
- def finalize(): Unit
- Attributes
- protected[lang]
- Definition Classes
- AnyRef
- Annotations
- @throws(classOf[java.lang.Throwable]) @Deprecated
- Deprecated
(Since version 9)