public class StatsReportListener extends SparkListener implements org.apache.spark.internal.Logging
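StatsReportListener is a simple SparkListener that logs summary statistics of task metrics to the driver log when each stage completes. The sketch below shows the two usual ways to enable it; the application name, master URL, and the toy job are placeholders, not part of this API.

```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.scheduler.StatsReportListener

object StatsReportDemo {
  def main(args: Array[String]): Unit = {
    // Placeholder app name and master; adjust for your environment.
    val conf = new SparkConf().setAppName("stats-report-demo").setMaster("local[*]")
    val sc = new SparkContext(conf)

    // Option 1: register the listener programmatically.
    sc.addSparkListener(new StatsReportListener())

    // Option 2 (equivalent): pass it via configuration when submitting the job:
    //   --conf spark.extraListeners=org.apache.spark.scheduler.StatsReportListener

    // Run any job; per-stage statistics are written to the driver log
    // when each stage completes.
    sc.parallelize(1 to 1000000).map(_ * 2).count()

    sc.stop()
  }
}
```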
Constructor and Description |
---|
StatsReportListener() |
Modifier and Type | Method and Description |
---|---|
static scala.Option<org.apache.spark.util.Distribution> | extractDoubleDistribution(scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics, scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,Object> getMetric) |
static scala.Option<org.apache.spark.util.Distribution> | extractLongDistribution(scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics, scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,Object> getMetric) |
static long | hours() |
static String | millisToString(long ms): Reformat a time interval in milliseconds to a prettier format for output. |
static long | minutes() |
void | onStageCompleted(SparkListenerStageCompleted stageCompleted): Called when a stage completes successfully or fails, with information on the completed stage. |
void | onTaskEnd(SparkListenerTaskEnd taskEnd): Called when a task ends. |
static void | org$apache$spark$internal$Logging$$log__$eq(org.slf4j.Logger x$1) |
static org.slf4j.Logger | org$apache$spark$internal$Logging$$log_() |
static int[] | percentiles() |
static String | percentilesHeader() |
static double[] | probabilities() |
static long | seconds() |
static void | showBytesDistribution(String heading, scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,Object> getMetric, scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics) |
static void | showBytesDistribution(String heading, scala.Option<org.apache.spark.util.Distribution> dOpt) |
static void | showBytesDistribution(String heading, org.apache.spark.util.Distribution dist) |
static void | showDistribution(String heading, scala.Option<org.apache.spark.util.Distribution> dOpt, scala.Function1<Object,String> formatNumber) |
static void | showDistribution(String heading, scala.Option<org.apache.spark.util.Distribution> dOpt, String format) |
static void | showDistribution(String heading, org.apache.spark.util.Distribution d, scala.Function1<Object,String> formatNumber) |
static void | showDistribution(String heading, String format, scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,Object> getMetric, scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics) |
static void | showMillisDistribution(String heading, scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,Object> getMetric, scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics) |
static void | showMillisDistribution(String heading, scala.Option<org.apache.spark.util.Distribution> dOpt) |
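In the extract*/show* helpers above, getMetric is an ordinary Scala function from a (TaskInfo, TaskMetrics) pair to a numeric value (the Object in the Java signatures is the boxed result), and taskInfoMetrics is the sequence of such pairs gathered for a stage. Below is a minimal sketch of what those arguments look like; executorRunTime is purely an illustrative metric, and whether the static helpers themselves are reachable from user code depends on your Spark version, since this is a developer API.

```scala
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.scheduler.TaskInfo

object MetricShapes {
  // Shape of a `getMetric` argument: extract one number per finished task.
  // executorRunTime is only an example; any per-task Long or Double works.
  val runTimeMetric: (TaskInfo, TaskMetrics) => Long =
    (_, metrics) => metrics.executorRunTime

  // Shape of a `taskInfoMetrics` argument: the (TaskInfo, TaskMetrics) pairs
  // observed for one stage, typically buffered from SparkListenerTaskEnd events.
  val taskInfoMetrics: Seq[(TaskInfo, TaskMetrics)] = Seq.empty
}
```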
Methods inherited from class org.apache.spark.scheduler.SparkListener:
onApplicationEnd, onApplicationStart, onBlockManagerAdded, onBlockManagerRemoved, onBlockUpdated, onEnvironmentUpdate, onExecutorAdded, onExecutorBlacklisted, onExecutorBlacklistedForStage, onExecutorExcluded, onExecutorExcludedForStage, onExecutorMetricsUpdate, onExecutorRemoved, onExecutorUnblacklisted, onExecutorUnexcluded, onJobEnd, onJobStart, onNodeBlacklisted, onNodeBlacklistedForStage, onNodeExcluded, onNodeExcludedForStage, onNodeUnblacklisted, onNodeUnexcluded, onOtherEvent, onResourceProfileAdded, onSpeculativeTaskSubmitted, onStageExecutorMetrics, onStageSubmitted, onTaskGettingResult, onTaskStart, onUnpersistRDD, onUnschedulableTaskSetAdded, onUnschedulableTaskSetRemoved
Methods inherited from class java.lang.Object:
equals, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
Methods inherited from interface org.apache.spark.internal.Logging:
$init$, initializeForcefully, initializeLogIfNecessary, initializeLogIfNecessary, initializeLogIfNecessary$default$2, initLock, isTraceEnabled, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning, org$apache$spark$internal$Logging$$log__$eq, org$apache$spark$internal$Logging$$log_, uninitialize
Method Detail

public static int[] percentiles()
public static double[] probabilities()
public static String percentilesHeader()
public static scala.Option<org.apache.spark.util.Distribution> extractDoubleDistribution(scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics, scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,Object> getMetric)
public static scala.Option<org.apache.spark.util.Distribution> extractLongDistribution(scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics, scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,Object> getMetric)
public static void showDistribution(String heading, org.apache.spark.util.Distribution d, scala.Function1<Object,String> formatNumber)
public static void showDistribution(String heading, scala.Option<org.apache.spark.util.Distribution> dOpt, scala.Function1<Object,String> formatNumber)
public static void showDistribution(String heading, scala.Option<org.apache.spark.util.Distribution> dOpt, String format)
public static void showDistribution(String heading, String format, scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,Object> getMetric, scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics)
public static void showBytesDistribution(String heading, scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,Object> getMetric, scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics)
public static void showBytesDistribution(String heading, scala.Option<org.apache.spark.util.Distribution> dOpt)
public static void showBytesDistribution(String heading, org.apache.spark.util.Distribution dist)
public static void showMillisDistribution(String heading, scala.Option<org.apache.spark.util.Distribution> dOpt)
public static void showMillisDistribution(String heading, scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,Object> getMetric, scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics)
public static long seconds()
public static long minutes()
public static long hours()
public static String millisToString(long ms)
Parameters: ms - (undocumented)

public static org.slf4j.Logger org$apache$spark$internal$Logging$$log_()

public static void org$apache$spark$internal$Logging$$log__$eq(org.slf4j.Logger x$1)
public void onTaskEnd(SparkListenerTaskEnd taskEnd)
Called when a task ends.
Specified by: onTaskEnd in interface SparkListenerInterface
Overrides: onTaskEnd in class SparkListener
Parameters: taskEnd - (undocumented)

public void onStageCompleted(SparkListenerStageCompleted stageCompleted)
Called when a stage completes successfully or fails, with information on the completed stage.
Specified by: onStageCompleted in interface SparkListenerInterface
Overrides: onStageCompleted in class SparkListener
Parameters: stageCompleted - (undocumented)
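The two overridden callbacks above are where the listener does its work: it buffers (TaskInfo, TaskMetrics) pairs as tasks end and logs a percentile summary when the stage completes. Below is a minimal sketch of that same pattern as a standalone listener; the class name MiniStatsListener, the chosen metric, the percentile set, and the log format are all illustrative, not StatsReportListener's actual implementation or output.

```scala
import scala.collection.mutable.ArrayBuffer

import org.apache.spark.executor.TaskMetrics
import org.apache.spark.scheduler.{SparkListener, SparkListenerStageCompleted, SparkListenerTaskEnd, TaskInfo}
import org.slf4j.LoggerFactory

// Buffer-then-summarize sketch in the spirit of StatsReportListener.
class MiniStatsListener extends SparkListener {
  private val log = LoggerFactory.getLogger(classOf[MiniStatsListener])
  private val taskInfoMetrics = ArrayBuffer.empty[(TaskInfo, TaskMetrics)]

  // Collect (TaskInfo, TaskMetrics) for every task that finishes.
  override def onTaskEnd(taskEnd: SparkListenerTaskEnd): Unit = {
    val metrics = taskEnd.taskMetrics
    if (metrics != null) {
      taskInfoMetrics += ((taskEnd.taskInfo, metrics))
    }
  }

  // Summarize the buffered tasks once the stage is done, then reset the buffer.
  override def onStageCompleted(stageCompleted: SparkListenerStageCompleted): Unit = {
    val runTimesMs = taskInfoMetrics.map { case (info, _) => info.duration }.sorted
    if (runTimesMs.nonEmpty) {
      val percentiles = Seq(0, 50, 95, 100) // illustrative; the real listener reports a wider set
      val summary = percentiles.map { p =>
        val idx = math.round(p / 100.0 * (runTimesMs.length - 1)).toInt
        s"p$p=${runTimesMs(idx)}ms"
      }.mkString(" ")
      log.info(s"Stage ${stageCompleted.stageInfo.stageId} task runtime: $summary")
    }
    taskInfoMetrics.clear()
  }
}
```

A listener like this is registered the same way as StatsReportListener, for example with sc.addSparkListener(new MiniStatsListener()).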