repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
repeated string diagnostics = 9;
repeated string diagnostics = 1;
repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
rpc cancelDelegationToken(.hadoop.common.CancelDelegationTokenRequestProto) returns (.hadoop.common.CancelDelegationTokenResponseProto);
The request sent by the client to the ResourceManager to cancel a delegation token.
The response sent by the ResourceManager to a cancelDelegationToken request.
repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
optional .hadoop.yarn.ApplicationIdProto app_id = 1;
optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
optional .hadoop.mapreduce.TaskAttemptIdProto attempt_id = 1;
optional int32 attempt_run_time = 4;
optional float cleanup_progress = 5;
repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
optional .hadoop.yarn.ContainerIdProto container_id = 3;
optional .hadoop.yarn.ContainerIdProto container_id = 15;
repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
optional .hadoop.mapreduce.CountersProto counters = 6;
optional .hadoop.mapreduce.CountersProto counters = 1;
optional string diagnostic_info = 7;
optional string diagnostics = 12;
repeated string diagnostics = 9;
repeated string diagnostics = 1;
optional string display_name = 2;
optional int32 event_id = 5;
optional int64 finish_time = 8;
optional int64 finish_time = 5;
optional int32 from_event_id = 2;
optional int32 id = 2;
optional int32 id = 3;
optional bool is_uber = 16 [default = false];
optional string jobFile = 13;
optional .hadoop.mapreduce.JobIdProto job_id = 1;
optional string jobName = 10;
optional .hadoop.mapreduce.JobReportProto job_report = 1;
optional .hadoop.mapreduce.JobStateProto job_state = 2;
optional string key = 1;
optional string map_output_server_address = 3;
optional float map_progress = 3;
optional int32 max_events = 3;
optional string name = 1;
optional string node_manager_host = 4;
optional string node_manager_host = 12;
optional int32 node_manager_http_port = 6;
optional int32 node_manager_http_port = 14;
optional int32 node_manager_port = 5;
optional int32 node_manager_port = 13;
optional .hadoop.mapreduce.PhaseProto phase = 9;
optional float progress = 3;
optional float reduce_progress = 4;
repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
optional float setup_progress = 6;
optional int64 shuffle_finish_time = 10;
optional int64 sort_finish_time = 11;
optional int64 start_time = 2;
optional int64 start_time = 7;
optional int64 start_time = 4;
optional string state_string = 8;
optional .hadoop.mapreduce.TaskAttemptCompletionEventStatusProto status = 2;
optional int64 submit_time = 15;
optional .hadoop.mapreduce.TaskAttemptIdProto successful_attempt = 8;
optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
optional .hadoop.mapreduce.TaskAttemptStateProto task_attempt_state = 2;
optional .hadoop.mapreduce.TaskIdProto task_id = 1;
optional .hadoop.mapreduce.TaskReportProto task_report = 1;
repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
optional .hadoop.mapreduce.TaskStateProto task_state = 2;
optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
optional string trackingUrl = 11;
optional string user = 9;
optional int64 value = 3;
optional .hadoop.mapreduce.CounterGroupProto value = 2;
optional .hadoop.mapreduce.CounterProto value = 2;
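Read together, the job-report entries above (job_id = 1 through is_uber = 16) line up as a single message. A sketch of hadoop.mapreduce.JobReportProto reconstructed from those field names and tag numbers; the grouping is inferred from this index, not copied from the .proto source:

// Sketch only: proto2 syntax, package hadoop.mapreduce assumed.
message JobReportProto {
  optional JobIdProto job_id = 1;
  optional JobStateProto job_state = 2;
  optional float map_progress = 3;
  optional float reduce_progress = 4;
  optional float cleanup_progress = 5;
  optional float setup_progress = 6;
  optional int64 start_time = 7;                 // timestamps are epoch millis by MR convention
  optional int64 finish_time = 8;
  optional string user = 9;
  optional string jobName = 10;
  optional string trackingUrl = 11;
  optional string diagnostics = 12;
  optional string jobFile = 13;
  repeated AMInfoProto am_infos = 14;            // one entry per AM attempt
  optional int64 submit_time = 15;
  optional bool is_uber = 16 [default = false];
}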
rpc failTaskAttempt(.hadoop.mapreduce.FailTaskAttemptRequestProto) returns (.hadoop.mapreduce.FailTaskAttemptResponseProto);
repeated .hadoop.mapreduce.AMInfoProto am_infos = 14;
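am_infos carries one AMInfoProto per application attempt. From the application_attempt_id, start_time, container_id and node_manager_* entries indexed here, the message plausibly reads as follows (a reconstruction, not the authoritative definition):

// Sketch only: proto2; hadoop.yarn imports omitted.
message AMInfoProto {
  optional hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
  optional int64 start_time = 2;
  optional hadoop.yarn.ContainerIdProto container_id = 3; // container the AM ran in
  optional string node_manager_host = 4;
  optional int32 node_manager_port = 5;
  optional int32 node_manager_http_port = 6;
}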
The application id comes from the ResourceManager and is used to generate the globally unique JobId.
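The identifier messages nest, which is how that uniqueness propagates: JobId wraps the YARN ApplicationId, TaskId wraps a JobId, and TaskAttemptId wraps a TaskId. A sketch assembled from the app_id, id, job_id, task_type and task_id entries in this index (grouping inferred):

// Sketch only: proto2; hadoop.yarn imports omitted.
message JobIdProto {
  optional hadoop.yarn.ApplicationIdProto app_id = 1; // assigned by the ResourceManager
  optional int32 id = 2;
}
message TaskIdProto {
  optional JobIdProto job_id = 1;
  optional TaskTypeProto task_type = 2;               // MAP or REDUCE
  optional int32 id = 3;
}
message TaskAttemptIdProto {
  optional TaskIdProto task_id = 1;
  optional int32 id = 2;                              // attempt number within the task
}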
optional .hadoop.yarn.ApplicationIdProto app_id = 1;
optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;
optional .hadoop.mapreduce.TaskAttemptIdProto attempt_id = 1;
optional int32 attempt_run_time = 4;
optional float cleanup_progress = 5;
repeated .hadoop.mapreduce.TaskAttemptCompletionEventProto completion_events = 1;
optional .hadoop.yarn.ContainerIdProto container_id = 3;
optional .hadoop.yarn.ContainerIdProto container_id = 15;
repeated .hadoop.mapreduce.StringCounterGroupMapProto counter_groups = 1;
repeated .hadoop.mapreduce.StringCounterMapProto counters = 3;
optional .hadoop.mapreduce.CountersProto counters = 6;
optional .hadoop.mapreduce.CountersProto counters = 1;
rpc getCounters(.hadoop.mapreduce.GetCountersRequestProto) returns (.hadoop.mapreduce.GetCountersResponseProto);
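The counters entries follow the proto2 idiom for string-keyed maps: proto2 has no map<> type, so each map is a repeated key/value entry message. A sketch of the hierarchy a getCounters call returns, reconstructed from the name, display_name, value, key, counters and counter_groups entries in this index:

// Sketch only: proto2, grouping inferred from this index.
message CounterProto {
  optional string name = 1;
  optional string display_name = 2;
  optional int64 value = 3;
}
message StringCounterMapProto {          // map entry: counter name -> counter
  optional string key = 1;
  optional CounterProto value = 2;
}
message CounterGroupProto {
  optional string name = 1;
  optional string display_name = 2;
  repeated StringCounterMapProto counters = 3;
}
message StringCounterGroupMapProto {     // map entry: group name -> group
  optional string key = 1;
  optional CounterGroupProto value = 2;
}
message CountersProto {
  repeated StringCounterGroupMapProto counter_groups = 1;
}
message GetCountersRequestProto {
  optional JobIdProto job_id = 1;
}
message GetCountersResponseProto {
  optional CountersProto counters = 1;
}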
rpc getDelegationToken(.hadoop.common.GetDelegationTokenRequestProto) returns (.hadoop.common.GetDelegationTokenResponseProto);
optional string diagnostic_info = 7;
optional string diagnostics = 12;
repeated string diagnostics = 9;
repeated string diagnostics = 1;
rpc getDiagnostics(.hadoop.mapreduce.GetDiagnosticsRequestProto) returns (.hadoop.mapreduce.GetDiagnosticsResponseProto);
optional string display_name = 2;
optional int32 event_id = 5;
optional int64 finish_time = 8;
optional int64 finish_time = 5;
optional int32 from_event_id = 2;
JobId is unique for all applications started by a particular instance of the ResourceManager.
optional int32 id = 2;
optional int32 id = 3;
optional bool is_uber = 16 [default = false];
optional string jobFile = 13;
optional .hadoop.mapreduce.JobIdProto job_id = 1;
optional string jobName = 10;
optional .hadoop.mapreduce.JobReportProto job_report = 1;
rpc getJobReport(.hadoop.mapreduce.GetJobReportRequestProto) returns (.hadoop.mapreduce.GetJobReportResponseProto);
optional .hadoop.mapreduce.JobStateProto job_state = 2;
optional string key = 1;
optional string map_output_server_address = 3;
optional float map_progress = 3;
optional int32 max_events = 3;
optional string name = 1;
optional string node_manager_host = 4;
optional string node_manager_host = 12;
optional int32 node_manager_http_port = 6;
optional int32 node_manager_http_port = 14;
optional int32 node_manager_port = 5;
optional int32 node_manager_port = 13;
optional .hadoop.mapreduce.PhaseProto phase = 9;
optional float progress = 3;
optional float reduce_progress = 4;
repeated .hadoop.mapreduce.TaskAttemptIdProto running_attempts = 7;
optional float setup_progress = 6;
optional int64 shuffle_finish_time = 10;
optional int64 sort_finish_time = 11;
optional int64 start_time = 2;
optional int64 start_time = 7;
optional int64 start_time = 4;
optional string state_string = 8;
optional .hadoop.mapreduce.TaskAttemptCompletionEventStatusProto status = 2;
optional int64 submit_time = 15;
optional .hadoop.mapreduce.TaskAttemptIdProto successful_attempt = 8;
rpc getTaskAttemptCompletionEvents(.hadoop.mapreduce.GetTaskAttemptCompletionEventsRequestProto) returns (.hadoop.mapreduce.GetTaskAttemptCompletionEventsResponseProto);
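getTaskAttemptCompletionEvents is a windowed query: the request names a job plus an event range, and the response carries the matching events. A sketch reconstructed from the from_event_id, max_events, attempt_id, status, map_output_server_address, attempt_run_time, event_id and completion_events entries in this index:

// Sketch only: proto2, grouping inferred from this index.
message GetTaskAttemptCompletionEventsRequestProto {
  optional JobIdProto job_id = 1;
  optional int32 from_event_id = 2;  // first event id wanted
  optional int32 max_events = 3;     // window size
}
message TaskAttemptCompletionEventProto {
  optional TaskAttemptIdProto attempt_id = 1;
  optional TaskAttemptCompletionEventStatusProto status = 2; // TACE_* values below
  optional string map_output_server_address = 3;             // where reducers fetch map output
  optional int32 attempt_run_time = 4;
  optional int32 event_id = 5;
}
message GetTaskAttemptCompletionEventsResponseProto {
  repeated TaskAttemptCompletionEventProto completion_events = 1;
}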
optional .hadoop.mapreduce.TaskAttemptIdProto task_attempt_id = 1;
optional .hadoop.mapreduce.TaskAttemptReportProto task_attempt_report = 1;
rpc getTaskAttemptReport(.hadoop.mapreduce.GetTaskAttemptReportRequestProto) returns (.hadoop.mapreduce.GetTaskAttemptReportResponseProto);
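The per-attempt report gathers the attempt-scoped entries indexed here (tags 1 through 15). A sketch of TaskAttemptReportProto, grouping inferred from this index:

// Sketch only: proto2; hadoop.yarn imports omitted.
message TaskAttemptReportProto {
  optional TaskAttemptIdProto task_attempt_id = 1;
  optional TaskAttemptStateProto task_attempt_state = 2; // TA_* values below
  optional float progress = 3;
  optional int64 start_time = 4;
  optional int64 finish_time = 5;
  optional CountersProto counters = 6;
  optional string diagnostic_info = 7;
  optional string state_string = 8;
  optional PhaseProto phase = 9;                         // P_* values below
  optional int64 shuffle_finish_time = 10;               // reduce-side milestones
  optional int64 sort_finish_time = 11;
  optional string node_manager_host = 12;
  optional int32 node_manager_port = 13;
  optional int32 node_manager_http_port = 14;
  optional hadoop.yarn.ContainerIdProto container_id = 15;
}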
optional .hadoop.mapreduce.TaskAttemptStateProto task_attempt_state = 2;
optional .hadoop.mapreduce.TaskIdProto task_id = 1;
optional .hadoop.mapreduce.TaskReportProto task_report = 1;
rpc getTaskReport(.hadoop.mapreduce.GetTaskReportRequestProto) returns (.hadoop.mapreduce.GetTaskReportResponseProto);
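The per-task report aggregates across attempts, hence the repeated running_attempts and the single successful_attempt. A sketch of TaskReportProto, grouping inferred from this index:

// Sketch only: proto2, grouping inferred from this index.
message TaskReportProto {
  optional TaskIdProto task_id = 1;
  optional TaskStateProto task_state = 2;             // TS_* values below
  optional float progress = 3;
  optional int64 start_time = 4;
  optional int64 finish_time = 5;
  optional CountersProto counters = 6;
  repeated TaskAttemptIdProto running_attempts = 7;
  optional TaskAttemptIdProto successful_attempt = 8;
  repeated string diagnostics = 9;
}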
repeated .hadoop.mapreduce.TaskReportProto task_reports = 1;
rpc getTaskReports(.hadoop.mapreduce.GetTaskReportsRequestProto) returns (.hadoop.mapreduce.GetTaskReportsResponseProto);
optional .hadoop.mapreduce.TaskStateProto task_state = 2;
optional .hadoop.mapreduce.TaskTypeProto task_type = 2;
optional string trackingUrl = 11;
optional string user = 9;
optional int64 value = 3;
optional .hadoop.mapreduce.CounterGroupProto value = 2;
optional .hadoop.mapreduce.CounterProto value = 2;
J_ERROR = 7;
J_FAILED = 5;
J_INITED = 2;
J_KILLED = 6;
J_NEW = 1;
J_RUNNING = 3;
J_SUCCEEDED = 4;
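Sorted by tag, the J_* entries above reconstruct as the job lifecycle enum (a sketch; values exactly as listed in this index):

enum JobStateProto {
  J_NEW = 1;
  J_INITED = 2;
  J_RUNNING = 3;
  J_SUCCEEDED = 4;
  J_FAILED = 5;
  J_KILLED = 6;
  J_ERROR = 7;
}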
JobId represents the globally unique identifier for a MapReduce job.
rpc killJob(.hadoop.mapreduce.KillJobRequestProto) returns (.hadoop.mapreduce.KillJobResponseProto);
rpc killTask(.hadoop.mapreduce.KillTaskRequestProto) returns (.hadoop.mapreduce.KillTaskResponseProto);
rpc killTaskAttempt(.hadoop.mapreduce.KillTaskAttemptRequestProto) returns (.hadoop.mapreduce.KillTaskAttemptResponseProto);
MAP = 1;
hadoop.mapreduce.MRClientProtocolService
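Collecting the rpc entries scattered through this index, the service plausibly reads as below. Method order is this sketch's own, and package qualifiers are shortened where the hadoop.mapreduce package is implied; only the signatures themselves are taken from the index:

// Sketch only: proto2; hadoop.common security-protocol imports omitted.
service MRClientProtocolService {
  rpc getJobReport(GetJobReportRequestProto) returns (GetJobReportResponseProto);
  rpc getTaskReport(GetTaskReportRequestProto) returns (GetTaskReportResponseProto);
  rpc getTaskReports(GetTaskReportsRequestProto) returns (GetTaskReportsResponseProto);
  rpc getTaskAttemptReport(GetTaskAttemptReportRequestProto) returns (GetTaskAttemptReportResponseProto);
  rpc getCounters(GetCountersRequestProto) returns (GetCountersResponseProto);
  rpc getTaskAttemptCompletionEvents(GetTaskAttemptCompletionEventsRequestProto) returns (GetTaskAttemptCompletionEventsResponseProto);
  rpc getDiagnostics(GetDiagnosticsRequestProto) returns (GetDiagnosticsResponseProto);
  rpc killJob(KillJobRequestProto) returns (KillJobResponseProto);
  rpc killTask(KillTaskRequestProto) returns (KillTaskResponseProto);
  rpc killTaskAttempt(KillTaskAttemptRequestProto) returns (KillTaskAttemptResponseProto);
  rpc failTaskAttempt(FailTaskAttemptRequestProto) returns (FailTaskAttemptResponseProto);
  rpc getDelegationToken(hadoop.common.GetDelegationTokenRequestProto) returns (hadoop.common.GetDelegationTokenResponseProto);
  rpc renewDelegationToken(hadoop.common.RenewDelegationTokenRequestProto) returns (hadoop.common.RenewDelegationTokenResponseProto);
  rpc cancelDelegationToken(hadoop.common.CancelDelegationTokenRequestProto) returns (hadoop.common.CancelDelegationTokenResponseProto);
}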
TokenIdentifier that identifies delegation tokens issued by JobHistoryServer to delegate MR tasks talking to the JobHistoryServer.
hadoop.mapreduce.AMInfoProto
hadoop.mapreduce.CounterGroupProto
hadoop.mapreduce.CounterProto
hadoop.mapreduce.CountersProto
hadoop.mapreduce.JobIdProto
hadoop.mapreduce.JobReportProto
hadoop.mapreduce.JobStateProto
hadoop.mapreduce.PhaseProto
hadoop.mapreduce.StringCounterGroupMapProto
hadoop.mapreduce.StringCounterMapProto
hadoop.mapreduce.TaskAttemptCompletionEventProto
hadoop.mapreduce.TaskAttemptCompletionEventStatusProto
hadoop.mapreduce.TaskAttemptIdProto
hadoop.mapreduce.TaskAttemptReportProto
hadoop.mapreduce.TaskAttemptStateProto
hadoop.mapreduce.TaskIdProto
hadoop.mapreduce.TaskReportProto
hadoop.mapreduce.TaskStateProto
hadoop.mapreduce.TaskTypeProto
hadoop.mapreduce.FailTaskAttemptRequestProto
hadoop.mapreduce.FailTaskAttemptResponseProto
hadoop.mapreduce.GetCountersRequestProto
hadoop.mapreduce.GetCountersResponseProto
hadoop.mapreduce.GetDiagnosticsRequestProto
hadoop.mapreduce.GetDiagnosticsResponseProto
hadoop.mapreduce.GetJobReportRequestProto
hadoop.mapreduce.GetJobReportResponseProto
hadoop.mapreduce.GetTaskAttemptCompletionEventsRequestProto
hadoop.mapreduce.GetTaskAttemptCompletionEventsResponseProto
hadoop.mapreduce.GetTaskAttemptReportRequestProto
hadoop.mapreduce.GetTaskAttemptReportResponseProto
hadoop.mapreduce.GetTaskReportRequestProto
hadoop.mapreduce.GetTaskReportResponseProto
hadoop.mapreduce.GetTaskReportsRequestProto
hadoop.mapreduce.GetTaskReportsResponseProto
hadoop.mapreduce.KillJobRequestProto
hadoop.mapreduce.KillJobResponseProto
hadoop.mapreduce.KillTaskAttemptRequestProto
hadoop.mapreduce.KillTaskAttemptResponseProto
hadoop.mapreduce.KillTaskRequestProto
hadoop.mapreduce.KillTaskResponseProto
P_CLEANUP = 6;
P_MAP = 2;
P_REDUCE = 5;
P_SHUFFLE = 3;
P_SORT = 4;
P_STARTING = 1;
REDUCE = 2;
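The MAP/REDUCE and P_* entries reconstruct as two small enums, the task type and the phase a task attempt is in (a sketch; values exactly as listed in this index):

enum TaskTypeProto {
  MAP = 1;
  REDUCE = 2;
}
enum PhaseProto {
  P_STARTING = 1;
  P_MAP = 2;
  P_SHUFFLE = 3;   // reduce-side phases follow
  P_SORT = 4;
  P_REDUCE = 5;
  P_CLEANUP = 6;
}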
rpc renewDelegationToken(.hadoop.common.RenewDelegationTokenRequestProto) returns (.hadoop.common.RenewDelegationTokenResponseProto);
The request sent by the client to the ResourceManager to renew a delegation token.
The response sent by the ResourceManager to a renewDelegationToken request.
Sets an ApplicationClassLoader on the given configuration and as the context classloader, if MRJobConfig.MAPREDUCE_JOB_CLASSLOADER is set to true and the APP_CLASSPATH environment variable is set.
TA_COMMIT_PENDING = 4;
TA_FAILED = 6;
TA_KILLED = 7;
TA_NEW = 1;
TA_RUNNING = 3;
TA_STARTING = 2;
TA_SUCCEEDED = 5;
TACE_FAILED = 1;
TACE_KILLED = 2;
TACE_OBSOLETE = 4;
TACE_SUCCEEDED = 3;
TACE_TIPFAILED = 5;
TaskAttemptId represents the unique identifier for a task attempt.
TaskId represents the unique identifier for a Map or Reduce Task.
TS_FAILED = 5;
TS_KILLED = 6;
TS_NEW = 1;
TS_RUNNING = 3;
TS_SCHEDULED = 2;
TS_SUCCEEDED = 4;
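Sorted by tag, the TA_*, TACE_* and TS_* entries above reconstruct as the three remaining state enums (a sketch; values exactly as listed in this index):

enum TaskAttemptStateProto {
  TA_NEW = 1;
  TA_STARTING = 2;
  TA_RUNNING = 3;
  TA_COMMIT_PENDING = 4;
  TA_SUCCEEDED = 5;
  TA_FAILED = 6;
  TA_KILLED = 7;
}
enum TaskAttemptCompletionEventStatusProto {
  TACE_FAILED = 1;
  TACE_KILLED = 2;
  TACE_SUCCEEDED = 3;
  TACE_OBSOLETE = 4;
  TACE_TIPFAILED = 5;
}
enum TaskStateProto {
  TS_NEW = 1;
  TS_SCHEDULED = 2;
  TS_RUNNING = 3;
  TS_SUCCEEDED = 4;
  TS_FAILED = 5;
  TS_KILLED = 6;
}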