public class SparkSqlStreamingConfigs$ extends Object

Modifier and Type | Field and Description |
---|---|
static SparkSqlStreamingConfigs$ | MODULE$: Static reference to the singleton instance of this Scala object. |

Constructor and Description |
---|
SparkSqlStreamingConfigs$() |

Modifier and Type | Method and Description |
---|---|
String | constructCommitLogPath(Settings settings): Determines the location of the streaming commit log. |
String | ES_INTERNAL_APP_ID() |
String | ES_INTERNAL_APP_NAME() |
String | ES_INTERNAL_QUERY_NAME() |
String | ES_INTERNAL_SESSION_CHECKPOINT_LOCATION() |
String | ES_INTERNAL_USER_CHECKPOINT_LOCATION() |
long | ES_SINK_LOG_CLEANUP_DELAY_DEFAULT() |
String | ES_SINK_LOG_CLEANUP_DELAY() |
int | ES_SINK_LOG_COMPACT_INTERVAL_DEFAULT() |
String | ES_SINK_LOG_COMPACT_INTERVAL() |
boolean | ES_SINK_LOG_DELETION_DEFAULT() |
String | ES_SINK_LOG_DELETION() |
boolean | ES_SINK_LOG_ENABLE_DEFAULT() |
String | ES_SINK_LOG_ENABLE() |
String | ES_SINK_LOG_PATH() |
scala.Option<String> | getAppId(Settings settings): The ID of the current Spark application, if set. |
scala.Option<String> | getAppName(Settings settings): The name of the current Spark application, if set. |
int | getDefaultCompactInterval(Settings settings) |
long | getFileCleanupDelayMs(Settings settings): The number of milliseconds to wait before cleaning up compacted log files. |
boolean | getIsDeletingExpiredLog(Settings settings) |
scala.Option<String> | getLogPath(Settings settings): The complete path to use for the commit log, if set. |
scala.Option<String> | getQueryName(Settings settings): The name of the current Spark Streaming query, if set. |
scala.Option<String> | getSessionCheckpointLocation(Settings settings): The default session checkpoint location, if set. |
boolean | getSinkLogEnabled(Settings settings): Determines whether the commit log should be used for writes or skipped entirely. |
scala.Option<String> | getUserSpecifiedCheckpointLocation(Settings settings): The user-specified checkpoint location for the current Spark Streaming query, if set. |
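
The key-returning methods (`ES_SINK_LOG_*`, `ES_INTERNAL_*`) and the typed getters above pair up naturally. Below is a minimal, hedged sketch of driving them from Java through the `MODULE$` field documented above; the package path `org.elasticsearch.spark.sql.streaming`, the concrete `PropertiesSettings` class, and its `setProperty(String, String)` method are assumptions taken from the wider elasticsearch-hadoop Settings API rather than from this page.

```java
import org.elasticsearch.hadoop.cfg.PropertiesSettings;                 // assumed concrete Settings implementation
import org.elasticsearch.hadoop.cfg.Settings;
import org.elasticsearch.spark.sql.streaming.SparkSqlStreamingConfigs$; // assumed package path

public class SinkLogConfigSketch {
    public static void main(String[] args) {
        // Reach the Scala singleton through its static MODULE$ field.
        SparkSqlStreamingConfigs$ configs = SparkSqlStreamingConfigs$.MODULE$;

        // Build connector settings; ES_SINK_LOG_ENABLE() presumably returns the
        // property key that getSinkLogEnabled(Settings) reads.
        Settings settings = new PropertiesSettings();
        settings.setProperty(configs.ES_SINK_LOG_ENABLE(), "true");

        // The typed getters presumably fall back to the *_DEFAULT() values
        // when the corresponding key is absent from the settings.
        boolean logEnabled   = configs.getSinkLogEnabled(settings);
        long cleanupDelayMs  = configs.getFileCleanupDelayMs(settings);
        int compactInterval  = configs.getDefaultCompactInterval(settings);
        boolean deleteOldLog = configs.getIsDeletingExpiredLog(settings);

        System.out.printf("enabled=%b cleanupDelayMs=%d compactInterval=%d delete=%b%n",
                logEnabled, cleanupDelayMs, compactInterval, deleteOldLog);
    }
}
```
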
public static final SparkSqlStreamingConfigs$ MODULE$
public String ES_SINK_LOG_ENABLE()
public boolean ES_SINK_LOG_ENABLE_DEFAULT()
public String ES_SINK_LOG_PATH()
public String ES_INTERNAL_APP_NAME()
public String ES_INTERNAL_APP_ID()
public String ES_INTERNAL_QUERY_NAME()
public String ES_INTERNAL_USER_CHECKPOINT_LOCATION()
public String ES_INTERNAL_SESSION_CHECKPOINT_LOCATION()
public String ES_SINK_LOG_CLEANUP_DELAY()
public long ES_SINK_LOG_CLEANUP_DELAY_DEFAULT()
public String ES_SINK_LOG_DELETION()
public boolean ES_SINK_LOG_DELETION_DEFAULT()
public String ES_SINK_LOG_COMPACT_INTERVAL()
public int ES_SINK_LOG_COMPACT_INTERVAL_DEFAULT()
public boolean getSinkLogEnabled(Settings settings)
Parameters:
settings - connector settings

public String constructCommitLogPath(Settings settings)
Parameters:
settings - connector settings

public scala.Option<String> getLogPath(Settings settings)
Parameters:
settings - connector settings

public scala.Option<String> getAppName(Settings settings)
Parameters:
settings - connector settings

public scala.Option<String> getAppId(Settings settings)
Parameters:
settings - connector settings

public scala.Option<String> getQueryName(Settings settings)
Parameters:
settings - connector settings

public scala.Option<String> getUserSpecifiedCheckpointLocation(Settings settings)
Parameters:
settings - connector settings

public scala.Option<String> getSessionCheckpointLocation(Settings settings)
Parameters:
settings - connector settings

public long getFileCleanupDelayMs(Settings settings)
Parameters:
settings - connector settings

public boolean getIsDeletingExpiredLog(Settings settings)
Parameters:
settings - connector settings

public int getDefaultCompactInterval(Settings settings)
Parameters:
settings - connector settings
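
Several of the Option-returning getters expose pieces that `constructCommitLogPath(Settings)` appears to combine: an explicit log path, a user-specified or session checkpoint location, and the application and query identity. The sketch below inspects that resolution under the same assumed imports as the earlier sketch; the exact precedence among these sources is not documented on this page, and the property values are hypothetical.

```java
import org.elasticsearch.hadoop.cfg.PropertiesSettings;                 // assumed concrete Settings implementation
import org.elasticsearch.hadoop.cfg.Settings;
import org.elasticsearch.spark.sql.streaming.SparkSqlStreamingConfigs$; // assumed package path
import scala.Option;

public class CommitLogPathSketch {
    public static void main(String[] args) {
        SparkSqlStreamingConfigs$ configs = SparkSqlStreamingConfigs$.MODULE$;

        // Hypothetical values, seeded under the internal keys that the
        // corresponding getters presumably read.
        Settings settings = new PropertiesSettings();
        settings.setProperty(configs.ES_INTERNAL_APP_NAME(), "my-streaming-app");
        settings.setProperty(configs.ES_INTERNAL_QUERY_NAME(), "my-query");
        settings.setProperty(configs.ES_INTERNAL_USER_CHECKPOINT_LOCATION(), "/tmp/checkpoints/my-query");

        // Option-returning getters are empty when the corresponding key is unset.
        Option<String> appName    = configs.getAppName(settings);
        Option<String> checkpoint = configs.getUserSpecifiedCheckpointLocation(settings);
        Option<String> logPath    = configs.getLogPath(settings);

        System.out.println("app name set:  " + appName.isDefined());
        System.out.println("checkpoint:    " + (checkpoint.isDefined() ? checkpoint.get() : "<none>"));
        System.out.println("explicit path: " + (logPath.isDefined() ? logPath.get() : "<none>"));

        // Resolves the final commit-log location from the settings above.
        String commitLogPath = configs.constructCommitLogPath(settings);
        System.out.println("commit log at: " + commitLogPath);
    }
}
```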