public class JsonTextBigQueryInputFormat extends AbstractBigQueryInputFormat<org.apache.hadoop.io.LongWritable,org.apache.hadoop.io.Text>
Fields inherited from class AbstractBigQueryInputFormat: EXTERNAL_TABLE_TYPE, INPUT_FORMAT_CLASS_KEY
Constructor and Description |
---|
JsonTextBigQueryInputFormat() |
Modifier and Type | Method and Description |
---|---|
org.apache.hadoop.mapreduce.RecordReader&lt;org.apache.hadoop.io.LongWritable,org.apache.hadoop.io.Text&gt; | createDelegateRecordReader(org.apache.hadoop.mapreduce.InputSplit split, org.apache.hadoop.conf.Configuration configuration) Create a new record reader for a single input split. |
ExportFileFormat | getExportFileFormat() Get the ExportFileFormat that this input format supports. |
Methods inherited from class AbstractBigQueryInputFormat: cleanupJob, cleanupJob, cleanupJob, createRecordReader, createRecordReader, getBigQuery, getBigQueryHelper, getExportFileFormat, getExportFileFormat, getSplits, isShardedExportEnabled, setEnableShardedExport, setInputTable, setInputTable, setTemporaryCloudStorageDirectory
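The sketch below shows one way this input format might be wired into a MapReduce job: records arrive in the mapper as (LongWritable, Text) pairs, where each Text value is one exported record as JSON text. The package name, and the parameters passed to the inherited setInputTable and setTemporaryCloudStorageDirectory helpers, are assumptions (their exact signatures are not shown on this page); everything else uses only standard Hadoop job APIs.

```java
import java.io.IOException;

import com.google.cloud.hadoop.io.bigquery.AbstractBigQueryInputFormat;
import com.google.cloud.hadoop.io.bigquery.JsonTextBigQueryInputFormat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;

public class JsonInputJob {

  /** Mapper receiving one exported BigQuery record per value as a JSON text line. */
  public static class PassThroughMapper
      extends Mapper<LongWritable, Text, NullWritable, Text> {
    @Override
    protected void map(LongWritable key, Text value, Context context)
        throws IOException, InterruptedException {
      // 'value' holds a single record as JSON text; forward it unchanged.
      context.write(NullWritable.get(), value);
    }
  }

  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();

    // Assumed parameters: setInputTable and setTemporaryCloudStorageDirectory are
    // listed above as inherited static helpers, but their signatures are not shown
    // on this page, so these calls are illustrative only.
    AbstractBigQueryInputFormat.setInputTable(conf, "my-project", "my_dataset", "my_table");
    AbstractBigQueryInputFormat.setTemporaryCloudStorageDirectory(conf, "gs://my-bucket/tmp");

    Job job = Job.getInstance(conf, "bigquery-json-read");
    job.setJarByClass(JsonInputJob.class);
    job.setInputFormatClass(JsonTextBigQueryInputFormat.class);
    job.setMapperClass(PassThroughMapper.class);
    job.setOutputKeyClass(NullWritable.class);
    job.setOutputValueClass(Text.class);
    // Output format and path configuration omitted for brevity.
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}
```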
public org.apache.hadoop.mapreduce.RecordReader&lt;org.apache.hadoop.io.LongWritable,org.apache.hadoop.io.Text&gt; createDelegateRecordReader(org.apache.hadoop.mapreduce.InputSplit split, org.apache.hadoop.conf.Configuration configuration) throws java.io.IOException, java.lang.InterruptedException

Create a new record reader for a single input split.

Specified by: createDelegateRecordReader in interface DelegateRecordReaderFactory

Throws:
java.io.IOException
java.lang.InterruptedException
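To illustrate the contract, here is a hypothetical subclass of AbstractBigQueryInputFormat (not the actual JsonTextBigQueryInputFormat implementation) that returns Hadoop's LineRecordReader as its delegate for newline-delimited files. The class is kept abstract so that only this one method is shown; the com.google.cloud.hadoop.io.bigquery package names are assumed.

```java
import java.io.IOException;

import com.google.cloud.hadoop.io.bigquery.AbstractBigQueryInputFormat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.lib.input.LineRecordReader;

/**
 * Hypothetical sketch: the delegate only has to read one exported file split;
 * export and split bookkeeping stay in AbstractBigQueryInputFormat. A concrete
 * subclass would still have to supply getExportFileFormat().
 */
public abstract class MyLineDelegatingInputFormat
    extends AbstractBigQueryInputFormat<LongWritable, Text> {

  @Override
  public RecordReader<LongWritable, Text> createDelegateRecordReader(
      InputSplit split, Configuration configuration)
      throws IOException, InterruptedException {
    // Each exported file holds newline-delimited records, so a plain
    // LineRecordReader can serve as the delegate; its initialize(...) is
    // expected to be invoked with this split before records are read.
    return new LineRecordReader();
  }
}
```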
public ExportFileFormat getExportFileFormat()

Description copied from class: AbstractBigQueryInputFormat

Get the ExportFileFormat that this input format supports.

Specified by: getExportFileFormat in class AbstractBigQueryInputFormat&lt;org.apache.hadoop.io.LongWritable,org.apache.hadoop.io.Text&gt;
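The accessor can also be called directly, without running a job, to see which export format this input format requests from BigQuery; a minimal sketch (package imports assumed):

```java
import com.google.cloud.hadoop.io.bigquery.ExportFileFormat;
import com.google.cloud.hadoop.io.bigquery.JsonTextBigQueryInputFormat;

public class ShowExportFormat {
  public static void main(String[] args) {
    // Ask the input format which ExportFileFormat it will request for the export.
    ExportFileFormat format = new JsonTextBigQueryInputFormat().getExportFileFormat();
    System.out.println("JsonTextBigQueryInputFormat exports as: " + format);
  }
}
```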