public class StatsReportListener extends java.lang.Object implements SparkListener, Logging
| Constructor and Description |
|---|
| StatsReportListener() |
Modifier and Type | Method and Description |
---|---|
static scala.Option<org.apache.spark.util.Distribution> |
extractDoubleDistribution(scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics,
scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,scala.Option<java.lang.Object>> getMetric) |
static scala.Option<org.apache.spark.util.Distribution> |
extractLongDistribution(scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics,
scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,scala.Option<java.lang.Object>> getMetric) |
static long |
hours() |
static java.lang.String |
millisToString(long ms)
Reformat a time interval in milliseconds to a prettier format for output
|
static long |
minutes() |
void |
onStageCompleted(SparkListenerStageCompleted stageCompleted)
Called when a stage completes successfully or fails, with information on the completed stage.
|
void |
onTaskEnd(SparkListenerTaskEnd taskEnd)
Called when a task ends.
|
static int[] |
percentiles() |
static java.lang.String |
percentilesHeader() |
static double[] |
probabilities() |
static long |
seconds() |
static void |
showBytesDistribution(java.lang.String heading,
scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,scala.Option<java.lang.Object>> getMetric,
scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics) |
static void |
showBytesDistribution(java.lang.String heading,
scala.Option<org.apache.spark.util.Distribution> dOpt) |
static void |
showBytesDistribution(java.lang.String heading,
org.apache.spark.util.Distribution dist) |
static void |
showDistribution(java.lang.String heading,
scala.Option<org.apache.spark.util.Distribution> dOpt,
scala.Function1<java.lang.Object,java.lang.String> formatNumber) |
static void |
showDistribution(java.lang.String heading,
scala.Option<org.apache.spark.util.Distribution> dOpt,
java.lang.String format) |
static void |
showDistribution(java.lang.String heading,
org.apache.spark.util.Distribution d,
scala.Function1<java.lang.Object,java.lang.String> formatNumber) |
static void |
showDistribution(java.lang.String heading,
java.lang.String format,
scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,scala.Option<java.lang.Object>> getMetric,
scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics) |
static void |
showMillisDistribution(java.lang.String heading,
scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,scala.Option<java.lang.Object>> getMetric,
scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics) |
static void |
showMillisDistribution(java.lang.String heading,
scala.Option<org.apache.spark.util.Distribution> dOpt) |
clone, equals, finalize, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
onApplicationEnd, onApplicationStart, onBlockManagerAdded, onBlockManagerRemoved, onBlockUpdated, onEnvironmentUpdate, onExecutorAdded, onExecutorMetricsUpdate, onExecutorRemoved, onJobEnd, onJobStart, onStageSubmitted, onTaskGettingResult, onTaskStart, onUnpersistRDD
initializeIfNecessary, initializeLogging, isTraceEnabled, log_, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning
public static int[] percentiles()
public static double[] probabilities()
public static java.lang.String percentilesHeader()
public static scala.Option<org.apache.spark.util.Distribution> extractDoubleDistribution(scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics, scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,scala.Option<java.lang.Object>> getMetric)
public static scala.Option<org.apache.spark.util.Distribution> extractLongDistribution(scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics, scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,scala.Option<java.lang.Object>> getMetric)
public static void showDistribution(java.lang.String heading, org.apache.spark.util.Distribution d, scala.Function1<java.lang.Object,java.lang.String> formatNumber)
public static void showDistribution(java.lang.String heading, scala.Option<org.apache.spark.util.Distribution> dOpt, scala.Function1<java.lang.Object,java.lang.String> formatNumber)
public static void showDistribution(java.lang.String heading, scala.Option<org.apache.spark.util.Distribution> dOpt, java.lang.String format)
public static void showDistribution(java.lang.String heading, java.lang.String format, scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,scala.Option<java.lang.Object>> getMetric, scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics)
public static void showBytesDistribution(java.lang.String heading, scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,scala.Option<java.lang.Object>> getMetric, scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics)
public static void showBytesDistribution(java.lang.String heading, scala.Option<org.apache.spark.util.Distribution> dOpt)
public static void showBytesDistribution(java.lang.String heading, org.apache.spark.util.Distribution dist)
public static void showMillisDistribution(java.lang.String heading, scala.Option<org.apache.spark.util.Distribution> dOpt)
public static void showMillisDistribution(java.lang.String heading, scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,scala.Option<java.lang.Object>> getMetric, scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics)
public static long seconds()
public static long minutes()
public static long hours()
public static java.lang.String millisToString(long ms)
ms
- (undocumented)

public void onTaskEnd(SparkListenerTaskEnd taskEnd)
SparkListener
onTaskEnd
in interface SparkListener
taskEnd
- (undocumented)

public void onStageCompleted(SparkListenerStageCompleted stageCompleted)
SparkListener
onStageCompleted
in interface SparkListener
stageCompleted
- (undocumented)