public class CqlBulkOutputFormat
extends org.apache.hadoop.mapreduce.OutputFormat<java.lang.Object,java.util.List<java.nio.ByteBuffer>>
implements org.apache.hadoop.mapred.OutputFormat<java.lang.Object,java.util.List<java.nio.ByteBuffer>>
CqlBulkOutputFormat acts as a Hadoop-specific OutputFormat that allows reduce tasks to store keys (and corresponding bound variable values) as CQL rows (and respective columns) in a given table.
As is the case with the CqlOutputFormat, you need to set the prepared statement in your Hadoop job Configuration. The CqlConfigHelper class, through its ConfigHelper#setOutputPreparedStatement method, is provided to make this simple.
You also need to set the keyspace. The ConfigHelper class, through its ConfigHelper.setOutputColumnFamily(org.apache.hadoop.conf.Configuration, java.lang.String) method, is provided to make this simple.
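A minimal job-setup sketch follows. The keyspace, table, and CQL strings are hypothetical; ConfigHelper.setOutputKeyspace is assumed to be available alongside the helper named above, while the schema and insert-statement setters are the static methods documented on this page.

```java
import org.apache.cassandra.hadoop.ConfigHelper;
import org.apache.cassandra.hadoop.cql3.CqlBulkOutputFormat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

public class BulkLoadJobSetup
{
    public static Job configure(Configuration conf) throws Exception
    {
        Job job = Job.getInstance(conf, "cql-bulk-load");

        // Route reducer output through the bulk output format.
        job.setOutputFormatClass(CqlBulkOutputFormat.class);

        // Keyspace and column family the SSTables are built for
        // (names are hypothetical).
        ConfigHelper.setOutputKeyspace(job.getConfiguration(), "my_keyspace");
        ConfigHelper.setOutputColumnFamily(job.getConfiguration(), "my_table");

        // Schema and prepared insert statement used when writing rows;
        // both setters are documented on this page.
        CqlBulkOutputFormat.setTableSchema(job.getConfiguration(), "my_table",
                "CREATE TABLE my_keyspace.my_table (id text PRIMARY KEY, value text)");
        CqlBulkOutputFormat.setTableInsertStatement(job.getConfiguration(), "my_table",
                "INSERT INTO my_keyspace.my_table (id, value) VALUES (?, ?)");

        return job;
    }
}
```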
Nested Class Summary

Modifier and Type | Class and Description
---|---
static class | CqlBulkOutputFormat.NullOutputCommitter
Constructor Summary

Constructor and Description
---
CqlBulkOutputFormat()
Method Summary

Modifier and Type | Method and Description
---|---
void | checkOutputSpecs(org.apache.hadoop.fs.FileSystem filesystem, org.apache.hadoop.mapred.JobConf job) Deprecated.
void | checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext context)
static boolean | getDeleteSourceOnSuccess(org.apache.hadoop.conf.Configuration conf)
org.apache.hadoop.mapreduce.OutputCommitter | getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext context)
CqlBulkRecordWriter | getRecordWriter(org.apache.hadoop.fs.FileSystem filesystem, org.apache.hadoop.mapred.JobConf job, java.lang.String name, org.apache.hadoop.util.Progressable progress) Deprecated.
CqlBulkRecordWriter | getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext context) Get the RecordWriter for the given task.
static java.lang.String | getTableForAlias(org.apache.hadoop.conf.Configuration conf, java.lang.String alias)
static java.lang.String | getTableInsertStatement(org.apache.hadoop.conf.Configuration conf, java.lang.String columnFamily)
static java.lang.String | getTableSchema(org.apache.hadoop.conf.Configuration conf, java.lang.String columnFamily)
static void | setDeleteSourceOnSuccess(org.apache.hadoop.conf.Configuration conf, boolean deleteSrc)
static void | setTableAlias(org.apache.hadoop.conf.Configuration conf, java.lang.String alias, java.lang.String columnFamily)
static void | setTableInsertStatement(org.apache.hadoop.conf.Configuration conf, java.lang.String columnFamily, java.lang.String insertStatement)
static void | setTableSchema(org.apache.hadoop.conf.Configuration conf, java.lang.String columnFamily, java.lang.String schema)
Method Detail

@Deprecated
public CqlBulkRecordWriter getRecordWriter(org.apache.hadoop.fs.FileSystem filesystem, org.apache.hadoop.mapred.JobConf job, java.lang.String name, org.apache.hadoop.util.Progressable progress) throws java.io.IOException

Specified by:
getRecordWriter in interface org.apache.hadoop.mapred.OutputFormat<java.lang.Object,java.util.List<java.nio.ByteBuffer>>
Throws:
java.io.IOException
public CqlBulkRecordWriter getRecordWriter(org.apache.hadoop.mapreduce.TaskAttemptContext context) throws java.io.IOException, java.lang.InterruptedException

Get the RecordWriter for the given task.

Specified by:
getRecordWriter in class org.apache.hadoop.mapreduce.OutputFormat<java.lang.Object,java.util.List<java.nio.ByteBuffer>>
Parameters:
context - the information about the current task.
Returns:
the RecordWriter to write the output for the job.
Throws:
java.io.IOException
java.lang.InterruptedException
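Since the output value type is java.util.List<java.nio.ByteBuffer>, a reducer feeding this format emits, for each row, the bound values for the configured insert statement in the order of its '?' markers. A hedged sketch, assuming the record writer takes its bindings entirely from the value list (so the output key can be passed as null); the input types (Text/Text) are illustrative:

```java
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;

import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class BulkLoadReducer extends Reducer<Text, Text, Object, List<ByteBuffer>>
{
    @Override
    protected void reduce(Text key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException
    {
        for (Text value : values)
        {
            // One List<ByteBuffer> per row: each element binds one '?' of
            // the prepared insert statement, in order.
            List<ByteBuffer> bound = Arrays.asList(
                    ByteBufferUtil.bytes(key.toString()),    // first '?'
                    ByteBufferUtil.bytes(value.toString())); // second '?'
            // Assumption: the record writer reads only the value list,
            // so the output key is passed as null.
            context.write(null, bound);
        }
    }
}
```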
public void checkOutputSpecs(org.apache.hadoop.mapreduce.JobContext context)

Specified by:
checkOutputSpecs in class org.apache.hadoop.mapreduce.OutputFormat<java.lang.Object,java.util.List<java.nio.ByteBuffer>>
@Deprecated
public void checkOutputSpecs(org.apache.hadoop.fs.FileSystem filesystem, org.apache.hadoop.mapred.JobConf job) throws java.io.IOException

Specified by:
checkOutputSpecs in interface org.apache.hadoop.mapred.OutputFormat<java.lang.Object,java.util.List<java.nio.ByteBuffer>>
Throws:
java.io.IOException
public org.apache.hadoop.mapreduce.OutputCommitter getOutputCommitter(org.apache.hadoop.mapreduce.TaskAttemptContext context) throws java.io.IOException, java.lang.InterruptedException

Specified by:
getOutputCommitter in class org.apache.hadoop.mapreduce.OutputFormat<java.lang.Object,java.util.List<java.nio.ByteBuffer>>
Throws:
java.io.IOException
java.lang.InterruptedException
public static void setTableSchema(org.apache.hadoop.conf.Configuration conf, java.lang.String columnFamily, java.lang.String schema)
public static void setTableInsertStatement(org.apache.hadoop.conf.Configuration conf, java.lang.String columnFamily, java.lang.String insertStatement)
public static java.lang.String getTableSchema(org.apache.hadoop.conf.Configuration conf, java.lang.String columnFamily)
public static java.lang.String getTableInsertStatement(org.apache.hadoop.conf.Configuration conf, java.lang.String columnFamily)
public static void setDeleteSourceOnSuccess(org.apache.hadoop.conf.Configuration conf, boolean deleteSrc)
public static boolean getDeleteSourceOnSuccess(org.apache.hadoop.conf.Configuration conf)
public static void setTableAlias(org.apache.hadoop.conf.Configuration conf, java.lang.String alias, java.lang.String columnFamily)
public static java.lang.String getTableForAlias(org.apache.hadoop.conf.Configuration conf, java.lang.String alias)
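An illustrative use of the static configuration helpers above. The alias and table names are hypothetical, and the behavior of setDeleteSourceOnSuccess is assumed from its name: that it controls deletion of the locally written SSTables after a successful load.

```java
import org.apache.cassandra.hadoop.cql3.CqlBulkOutputFormat;
import org.apache.hadoop.conf.Configuration;

public class AliasConfigExample
{
    public static void main(String[] args)
    {
        Configuration conf = new Configuration();

        // Map a short alias onto a concrete column family, then resolve it
        // back (alias and table names are hypothetical).
        CqlBulkOutputFormat.setTableAlias(conf, "events_alias", "events");
        System.out.println(CqlBulkOutputFormat.getTableForAlias(conf, "events_alias")); // events

        // Assumption: asks the writer to delete the locally generated
        // SSTable files once they have been streamed successfully.
        CqlBulkOutputFormat.setDeleteSourceOnSuccess(conf, true);
        System.out.println(CqlBulkOutputFormat.getDeleteSourceOnSuccess(conf)); // true
    }
}
```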