org.apache.hadoop.mapred
Class Task

java.lang.Object
  org.apache.hadoop.mapred.Task

@InterfaceAudience.LimitedPrivate(value="MapReduce")
@InterfaceStability.Unstable
public abstract class Task
Base class for tasks.
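Task is marked LimitedPrivate/Unstable, so it is normally only extended inside the MapReduce framework (by MapTask and ReduceTask). Purely as a hedged sketch of the contract defined by the two abstract methods below, a do-nothing subclass might look like this; the class name and body are illustrative, not part of the API:

```java
import java.io.IOException;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Task;
import org.apache.hadoop.mapred.TaskUmbilicalProtocol;

// Illustrative only: shows which methods a concrete task must supply.
// Real implementations are the framework's own MapTask and ReduceTask.
public class NoOpTask extends Task {

  @Override
  public boolean isMapTask() {
    // Tells the framework whether this attempt is a map (true) or a reduce (false).
    return true;
  }

  @Override
  public void run(JobConf job, TaskUmbilicalProtocol umbilical)
      throws IOException, ClassNotFoundException, InterruptedException {
    // A real task would do its work here, reporting progress through the
    // umbilical, and typically finish by calling done(umbilical, reporter).
  }
}
```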
Nested Class Summary

| Modifier and Type | Class and Description |
|---|---|
| static class | Task.CombineOutputCollector<K,V>: OutputCollector for the combiner. |
| static class | Task.CombinerRunner<K,V> |
| static class | Task.CombineValuesIterator<KEY,VALUE>: Iterator to return combined values. |
| static class | Task.Counter: Deprecated. Provided for compatibility. Use TaskCounter instead. |
| protected static class | Task.NewCombinerRunner<K,V> |
| protected static class | Task.OldCombinerRunner<K,V> |
| class | Task.TaskReporter |
Field Summary

| Modifier and Type | Field and Description |
|---|---|
| protected OutputCommitter | committer |
| protected JobConf | conf |
| static long | DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS |
| protected org.apache.hadoop.io.BytesWritable | extraData |
| protected Counters.Counter | failedShuffleCounter |
| protected static String | FILESYSTEM_COUNTER_GROUP: Name of the FileSystem counters' group. |
| protected org.apache.hadoop.mapred.Task.GcTimeUpdater | gcUpdater |
| protected boolean | jobCleanup |
| protected JobContext | jobContext |
| protected JobStatus.State | jobRunStateForCleanup |
| protected boolean | jobSetup |
| protected org.apache.hadoop.fs.LocalDirAllocator | lDirAlloc |
| protected MapOutputFile | mapOutputFile |
| static String | MERGED_OUTPUT_PREFIX |
| protected Counters.Counter | mergedMapOutputsCounter |
| protected OutputFormat<?,?> | outputFormat |
| static int | PROGRESS_INTERVAL: The number of milliseconds between progress reports. |
| protected Counters.Counter | spilledRecordsCounter |
| protected boolean | taskCleanup |
| protected TaskAttemptContext | taskContext |
| protected SecretKey | tokenSecret |
| protected TaskUmbilicalProtocol | umbilical |
Constructor Summary

| Constructor and Description |
|---|
| Task() |
| Task(String jobFile, TaskAttemptID taskId, int partition, int numSlotsRequired) |
Method Summary

| Modifier and Type | Method and Description |
|---|---|
| protected static <INKEY,INVALUE,OUTKEY,OUTVALUE> Reducer.Context | createReduceContext(Reducer<INKEY,INVALUE,OUTKEY,OUTVALUE> reducer, org.apache.hadoop.conf.Configuration job, TaskAttemptID taskId, RawKeyValueIterator rIter, Counter inputKeyCounter, Counter inputValueCounter, RecordWriter<OUTKEY,OUTVALUE> output, OutputCommitter committer, StatusReporter reporter, org.apache.hadoop.io.RawComparator<INKEY> comparator, Class<INKEY> keyClass, Class<INVALUE> valueClass) |
| void | done(TaskUmbilicalProtocol umbilical, Task.TaskReporter reporter) |
| org.apache.hadoop.conf.Configuration | getConf() |
| protected static String[] | getFileSystemCounterNames(String uriScheme): Counters to measure the usage of the different file systems. |
| protected static List<org.apache.hadoop.fs.FileSystem.Statistics> | getFsStatistics(org.apache.hadoop.fs.Path path, org.apache.hadoop.conf.Configuration conf): Gets a handle to the Statistics instance based on the scheme associated with path. |
| String | getJobFile() |
| JobID | getJobID(): Get the job name for this task. |
| SecretKey | getJobTokenSecret(): Get the job token secret. |
| MapOutputFile | getMapOutputFile() |
| int | getNumSlotsRequired() |
| int | getPartition(): Get the index of this task within the job. |
| TaskStatus.Phase | getPhase(): Return current phase of the task. |
| org.apache.hadoop.util.Progress | getProgress() |
| org.apache.hadoop.mapred.SortedRanges | getSkipRanges(): Get skipRanges. |
| TaskAttemptID | getTaskID() |
| void | initialize(JobConf job, JobID id, Reporter reporter, boolean useNewApi) |
| abstract boolean | isMapTask() |
| boolean | isSkipping(): Is Task in skipping mode. |
| protected boolean | keepTaskFiles(JobConf conf) |
| void | localizeConfiguration(JobConf conf): Localize the given JobConf to be specific for this task. |
| static String | normalizeStatus(String status, org.apache.hadoop.conf.Configuration conf) |
| void | readFields(DataInput in) |
| protected void | reportFatalError(TaskAttemptID id, Throwable throwable, String logMsg): Report a fatal error to the parent (task) tracker. |
| protected void | reportNextRecordRange(TaskUmbilicalProtocol umbilical, long nextRecIndex): Reports the next executing record range to TaskTracker. |
| abstract void | run(JobConf job, TaskUmbilicalProtocol umbilical): Run this task as a part of the named job. |
| protected void | runJobCleanupTask(TaskUmbilicalProtocol umbilical, Task.TaskReporter reporter) |
| protected void | runJobSetupTask(TaskUmbilicalProtocol umbilical, Task.TaskReporter reporter) |
| protected void | runTaskCleanupTask(TaskUmbilicalProtocol umbilical, Task.TaskReporter reporter) |
| void | setConf(org.apache.hadoop.conf.Configuration conf) |
| void | setJobFile(String jobFile) |
| void | setJobTokenSecret(SecretKey tokenSecret): Set the job token secret. |
| protected void | setPhase(TaskStatus.Phase phase): Set current phase of the task. |
| void | setSkipping(boolean skipping): Sets whether to run Task in skipping mode. |
| void | setSkipRanges(org.apache.hadoop.mapred.SortedRanges skipRanges): Set skipRanges. |
| protected void | setWriteSkipRecs(boolean writeSkipRecs): Set whether to write skip records. |
| void | statusUpdate(TaskUmbilicalProtocol umbilical): Send a status update to the task tracker. |
| String | toString() |
| protected boolean | toWriteSkipRecs(): Get whether to write skip records. |
| void | write(DataOutput out) |
Methods inherited from class java.lang.Object

clone, equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait
Field Detail
public static String MERGED_OUTPUT_PREFIX
public static final long DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS
protected static final String FILESYSTEM_COUNTER_GROUP
protected JobStatus.State jobRunStateForCleanup
protected boolean jobCleanup
protected boolean jobSetup
protected boolean taskCleanup
protected org.apache.hadoop.io.BytesWritable extraData
protected JobConf conf
protected MapOutputFile mapOutputFile
protected org.apache.hadoop.fs.LocalDirAllocator lDirAlloc
protected JobContext jobContext
protected TaskAttemptContext taskContext
protected OutputFormat<?,?> outputFormat
protected OutputCommitter committer
protected final Counters.Counter spilledRecordsCounter
protected final Counters.Counter failedShuffleCounter
protected final Counters.Counter mergedMapOutputsCounter
protected TaskUmbilicalProtocol umbilical
protected SecretKey tokenSecret
protected org.apache.hadoop.mapred.Task.GcTimeUpdater gcUpdater
public static final int PROGRESS_INTERVAL
Constructor Detail
public Task()
public Task(String jobFile, TaskAttemptID taskId, int partition, int numSlotsRequired)
Method Detail
protected static String[] getFileSystemCounterNames(String uriScheme)
public void setJobFile(String jobFile)
public String getJobFile()
public TaskAttemptID getTaskID()
public int getNumSlotsRequired()
public JobID getJobID()
public void setJobTokenSecret(SecretKey tokenSecret)
Parameters:
tokenSecret - the secret

public SecretKey getJobTokenSecret()
public int getPartition()
public TaskStatus.Phase getPhase()
protected void setPhase(TaskStatus.Phase phase)
Parameters:
phase - task phase

protected boolean toWriteSkipRecs()
protected void setWriteSkipRecs(boolean writeSkipRecs)
protected void reportFatalError(TaskAttemptID id, Throwable throwable, String logMsg)
protected static List<org.apache.hadoop.fs.FileSystem.Statistics> getFsStatistics(org.apache.hadoop.fs.Path path, org.apache.hadoop.conf.Configuration conf) throws IOException
Parameters:
path - the path.
conf - the configuration to extract the scheme from if not part of the path.
Throws:
IOException
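Since getFsStatistics is protected static, it is only reachable from Task subclasses. A hedged sketch of how such code might total the bytes written to a path's filesystem; the helper name bytesWrittenTo is made up for illustration:

```java
import java.io.IOException;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Inside a hypothetical Task subclass:
protected long bytesWrittenTo(Path out, Configuration conf) throws IOException {
  // Statistics are keyed by filesystem scheme; the scheme comes from the path,
  // or from the configuration's default filesystem if the path has none.
  List<FileSystem.Statistics> stats = getFsStatistics(out, conf);
  long total = 0;
  for (FileSystem.Statistics s : stats) {
    total += s.getBytesWritten();
  }
  return total;
}
```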
public org.apache.hadoop.mapred.SortedRanges getSkipRanges()
public void setSkipRanges(org.apache.hadoop.mapred.SortedRanges skipRanges)
public boolean isSkipping()
public void setSkipping(boolean skipping)
Parameters:
skipping -

public void write(DataOutput out) throws IOException
Specified by:
write in interface org.apache.hadoop.io.Writable
Throws:
IOException

public void readFields(DataInput in) throws IOException
Specified by:
readFields in interface org.apache.hadoop.io.Writable
Throws:
IOException
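write and readFields are the two halves of the Writable contract, which is how a task's state is serialized between processes. A minimal sketch of that round trip, assuming original and copy are instances of the same concrete Task subclass; the helper name roundTrip is illustrative only:

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.mapred.Task;

// Serialize one task and restore its state into another instance.
static void roundTrip(Task original, Task copy) throws IOException {
  ByteArrayOutputStream bytes = new ByteArrayOutputStream();
  original.write(new DataOutputStream(bytes));                  // Writable.write
  DataInputStream in =
      new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
  copy.readFields(in);                                          // Writable.readFields
}
```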
public String toString()
Overrides:
toString in class Object
public void localizeConfiguration(JobConf conf) throws IOException
Throws:
IOException
public abstract void run(JobConf job, TaskUmbilicalProtocol umbilical) throws IOException, ClassNotFoundException, InterruptedException
Parameters:
umbilical - for progress reports
Throws:
IOException
ClassNotFoundException
InterruptedException
public abstract boolean isMapTask()
public org.apache.hadoop.util.Progress getProgress()
public void initialize(JobConf job, JobID id, Reporter reporter, boolean useNewApi) throws IOException, ClassNotFoundException, InterruptedException
Throws:
IOException
ClassNotFoundException
InterruptedException
public static String normalizeStatus(String status, org.apache.hadoop.conf.Configuration conf)
protected void reportNextRecordRange(TaskUmbilicalProtocol umbilical, long nextRecIndex) throws IOException
Parameters:
umbilical -
nextRecIndex - the record index which would be fed next.
Throws:
IOException
public void done(TaskUmbilicalProtocol umbilical, Task.TaskReporter reporter) throws IOException, InterruptedException
Throws:
IOException
InterruptedException
public void statusUpdate(TaskUmbilicalProtocol umbilical) throws IOException
Parameters:
umbilical -
Throws:
IOException
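statusUpdate pairs naturally with the PROGRESS_INTERVAL field, which holds the number of milliseconds between progress reports. As a hedged sketch assumed to live inside a Task subclass, a reporting loop might push status on that cadence; the finished flag is a made-up stand-in for whatever end-of-work signal the task actually uses:

```java
import java.io.IOException;
import org.apache.hadoop.mapred.Task;
import org.apache.hadoop.mapred.TaskUmbilicalProtocol;

// Inside a hypothetical Task subclass: report status every PROGRESS_INTERVAL ms
// until the task's own work loop flips "finished".
private volatile boolean finished = false;

private void reportUntilDone(TaskUmbilicalProtocol umbilical)
    throws IOException, InterruptedException {
  while (!finished) {
    Thread.sleep(Task.PROGRESS_INTERVAL);   // milliseconds between reports
    statusUpdate(umbilical);                // send progress/counters to the parent
  }
}
```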
protected void runTaskCleanupTask(TaskUmbilicalProtocol umbilical, Task.TaskReporter reporter) throws IOException, InterruptedException
Throws:
IOException
InterruptedException
protected void runJobCleanupTask(TaskUmbilicalProtocol umbilical, Task.TaskReporter reporter) throws IOException, InterruptedException
Throws:
IOException
InterruptedException
protected boolean keepTaskFiles(JobConf conf)
protected void runJobSetupTask(TaskUmbilicalProtocol umbilical, Task.TaskReporter reporter) throws IOException, InterruptedException
Throws:
IOException
InterruptedException
public void setConf(org.apache.hadoop.conf.Configuration conf)
Specified by:
setConf in interface org.apache.hadoop.conf.Configurable
public org.apache.hadoop.conf.Configuration getConf()
Specified by:
getConf in interface org.apache.hadoop.conf.Configurable
public MapOutputFile getMapOutputFile()
protected static <INKEY,INVALUE,OUTKEY,OUTVALUE> Reducer.Context createReduceContext(Reducer<INKEY,INVALUE,OUTKEY,OUTVALUE> reducer, org.apache.hadoop.conf.Configuration job, TaskAttemptID taskId, RawKeyValueIterator rIter, Counter inputKeyCounter, Counter inputValueCounter, RecordWriter<OUTKEY,OUTVALUE> output, OutputCommitter committer, StatusReporter reporter, org.apache.hadoop.io.RawComparator<INKEY> comparator, Class<INKEY> keyClass, Class<INVALUE> valueClass) throws IOException, InterruptedException
Throws:
IOException
InterruptedException