public static interface StoreTypes.JobDataOrBuilder
extends com.google.protobuf.MessageOrBuilder
Modifier and Type | Method and Description |
---|---|
boolean |
containsKillTasksSummary(String key)
map<string, int32> kill_tasks_summary = 20; |
long |
getCompletionTime()
optional int64 completion_time = 5; |
String |
getDescription()
optional string description = 3; |
com.google.protobuf.ByteString |
getDescriptionBytes()
optional string description = 3; |
String |
getJobGroup()
optional string job_group = 7; |
com.google.protobuf.ByteString |
getJobGroupBytes()
optional string job_group = 7; |
long |
getJobId()
All IDs are int64 for extendability, even when they are currently int32 in Spark.
|
java.util.Map<String,Integer> |
getKillTasksSummary()
Deprecated. Use getKillTasksSummaryMap() instead.
|
int |
getKillTasksSummaryCount()
map<string, int32> kill_tasks_summary = 20; |
java.util.Map<String,Integer> |
getKillTasksSummaryMap()
map<string, int32> kill_tasks_summary = 20; |
int |
getKillTasksSummaryOrDefault(String key,
int defaultValue)
map<string, int32> kill_tasks_summary = 20; |
int |
getKillTasksSummaryOrThrow(String key)
map<string, int32> kill_tasks_summary = 20; |
String |
getName()
optional string name = 2; |
com.google.protobuf.ByteString |
getNameBytes()
optional string name = 2; |
int |
getNumActiveStages()
int32 num_active_stages = 16; |
int |
getNumActiveTasks()
int32 num_active_tasks = 10; |
int |
getNumCompletedIndices()
int32 num_completed_indices = 15; |
int |
getNumCompletedStages()
int32 num_completed_stages = 17; |
int |
getNumCompletedTasks()
int32 num_completed_tasks = 11; |
int |
getNumFailedStages()
int32 num_failed_stages = 19; |
int |
getNumFailedTasks()
int32 num_failed_tasks = 13; |
int |
getNumKilledTasks()
int32 num_killed_tasks = 14; |
int |
getNumSkippedStages()
int32 num_skipped_stages = 18; |
int |
getNumSkippedTasks()
int32 num_skipped_tasks = 12; |
int |
getNumTasks()
int32 num_tasks = 9; |
long |
getStageIds(int index)
repeated int64 stage_ids = 6; |
int |
getStageIdsCount()
repeated int64 stage_ids = 6; |
java.util.List<Long> |
getStageIdsList()
repeated int64 stage_ids = 6; |
StoreTypes.JobExecutionStatus |
getStatus()
.org.apache.spark.status.protobuf.JobExecutionStatus status = 8; |
int |
getStatusValue()
.org.apache.spark.status.protobuf.JobExecutionStatus status = 8; |
long |
getSubmissionTime()
optional int64 submission_time = 4; |
boolean |
hasCompletionTime()
optional int64 completion_time = 5; |
boolean |
hasDescription()
optional string description = 3; |
boolean |
hasJobGroup()
optional string job_group = 7; |
boolean |
hasName()
optional string name = 2; |
boolean |
hasSubmissionTime()
optional int64 submission_time = 4; |
Methods inherited from interface com.google.protobuf.MessageOrBuilder: findInitializationErrors, getAllFields, getDefaultInstanceForType, getDescriptorForType, getField, getInitializationErrorString, getOneofFieldDescriptor, getRepeatedField, getRepeatedFieldCount, getUnknownFields, hasField, hasOneof
long getJobId()
All IDs are int64 for extendability, even when they are currently int32 in Spark.
int64 job_id = 1;
boolean hasName()
optional string name = 2;
String getName()
optional string name = 2;
com.google.protobuf.ByteString getNameBytes()
optional string name = 2;
boolean hasDescription()
optional string description = 3;
String getDescription()
optional string description = 3;
com.google.protobuf.ByteString getDescriptionBytes()
optional string description = 3;
boolean hasSubmissionTime()
optional int64 submission_time = 4;
long getSubmissionTime()
optional int64 submission_time = 4;
boolean hasCompletionTime()
optional int64 completion_time = 5;
long getCompletionTime()
optional int64 completion_time = 5;
java.util.List<Long> getStageIdsList()
repeated int64 stage_ids = 6;
int getStageIdsCount()
repeated int64 stage_ids = 6;
long getStageIds(int index)
repeated int64 stage_ids = 6;
Parameters:
index - The index of the element to return.
boolean hasJobGroup()
optional string job_group = 7;
String getJobGroup()
optional string job_group = 7;
com.google.protobuf.ByteString getJobGroupBytes()
optional string job_group = 7;
int getStatusValue()
.org.apache.spark.status.protobuf.JobExecutionStatus status = 8;
StoreTypes.JobExecutionStatus getStatus()
.org.apache.spark.status.protobuf.JobExecutionStatus status = 8;
int getNumTasks()
int32 num_tasks = 9;
int getNumActiveTasks()
int32 num_active_tasks = 10;
int getNumCompletedTasks()
int32 num_completed_tasks = 11;
int getNumSkippedTasks()
int32 num_skipped_tasks = 12;
int getNumFailedTasks()
int32 num_failed_tasks = 13;
int getNumKilledTasks()
int32 num_killed_tasks = 14;
int getNumCompletedIndices()
int32 num_completed_indices = 15;
int getNumActiveStages()
int32 num_active_stages = 16;
int getNumCompletedStages()
int32 num_completed_stages = 17;
int getNumSkippedStages()
int32 num_skipped_stages = 18;
int getNumFailedStages()
int32 num_failed_stages = 19;
int getKillTasksSummaryCount()
map<string, int32> kill_tasks_summary = 20;
boolean containsKillTasksSummary(String key)
map<string, int32> kill_tasks_summary = 20;
@Deprecated java.util.Map<String,Integer> getKillTasksSummary()
Deprecated. Use getKillTasksSummaryMap() instead.
java.util.Map<String,Integer> getKillTasksSummaryMap()
map<string, int32> kill_tasks_summary = 20;
int getKillTasksSummaryOrDefault(String key, int defaultValue)
map<string, int32> kill_tasks_summary = 20;
int getKillTasksSummaryOrThrow(String key)
map<string, int32> kill_tasks_summary = 20;