<%@ page
contentType="text/html; charset=UTF-8"
import="javax.servlet.http.*"
import="java.io.*"
import="java.util.*"
import="org.apache.hadoop.mapred.*"
import="org.apache.hadoop.util.*"
import="java.text.SimpleDateFormat"
import="org.apache.hadoop.mapred.JobHistory.*"
%>
"/>
"/>
<%-- Formatter used for all timestamps rendered on this page.
     NOTE(review): SimpleDateFormat is NOT thread-safe, and JSPs serve
     concurrent requests with a single page instance -- this shared static
     instance can produce corrupted dates under load. Fixing it needs a
     per-request instance (or ThreadLocal) threaded through the
     getFormattedTimeWithDiff calls below. --%>
<%! static SimpleDateFormat dateFormat = new SimpleDateFormat("d-MMM-yyyy HH:mm:ss") ; %>
<%
	// Job identity from the query string: the job id and the id of the
	// JobTracker instance that ran it (together they name the history file).
	String jobid = request.getParameter("jobid");
	String jobTrackerId = request.getParameter("jobTrackerId");
	// Parsed job history placed in the session by an earlier page
	// (presumably loadhistory.jsp -- confirm). NOTE(review): if the
	// attribute is absent this is null and every expression below NPEs.
	JobInfo job = (JobInfo)request.getSession().getAttribute("job");
%>
Hadoop Job <%=jobid %>
User : <%=job.get(Keys.USER) %>
JobName : <%=job.get(Keys.JOBNAME) %>
JobConf : <%=job.get(Keys.JOBCONF) %>
Submitted At : <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLong(Keys.SUBMIT_TIME), 0 ) %>
Launched At : <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLong(Keys.LAUNCH_TIME), job.getLong(Keys.SUBMIT_TIME)) %>
Finished At : <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLong(Keys.FINISH_TIME), job.getLong(Keys.LAUNCH_TIME)) %>
Status : <%= ((job.get(Keys.JOB_STATUS) == null)?"Incomplete" :job.get(Keys.JOB_STATUS)) %>
Analyse This Job
<%
	// Walk every task recorded in the job history and aggregate per-phase
	// statistics for display:
	//   totalMaps/totalReduces   -- number of task *attempts* (not unique
	//                               tasks): incremented once per attempt.
	//   failedMaps/failedReduces -- attempts whose TASK_STATUS is FAILED.
	//   mapStarted/reduceStarted -- earliest start time seen (0 = none yet).
	//   mapFinished/reduceFinished -- latest finish time seen.
	// NOTE(review): generic type parameters appear stripped by extraction
	// ("Map tasks" is presumably Map<String, JobHistory.Task>) -- confirm
	// against the original source.
	Map tasks = job.getAllTasks();
	int totalMaps = 0 ;
	int totalReduces = 0;
	int failedMaps = 0;
	int failedReduces = 0 ;
	// Phase time bounds; a start time of 0 is the "not seen yet" sentinel.
	long mapStarted = 0 ;
	long mapFinished = 0 ;
	long reduceStarted = 0 ;
	long reduceFinished = 0;
	for( JobHistory.Task task : tasks.values() ) {
	long startTime = task.getLong(Keys.START_TIME) ;
	long finishTime = task.getLong(Keys.FINISH_TIME) ;
	if( Values.MAP.name().equals(task.get(Keys.TASK_TYPE)) ){
	// Keep the earliest map start (0 means this is the first map seen).
	if( mapStarted==0 || mapStarted > startTime ){
	mapStarted = startTime;
	}
	// Keep the latest map finish.
	if( mapFinished < finishTime ){
	mapFinished = finishTime ;
	}
	Map attempts = task.getTaskAttempts();
	for( TaskAttempt attempt : attempts.values() ) {
	totalMaps++;
	if( Values.FAILED.name().equals(attempt.get(Keys.TASK_STATUS)) ) {
	failedMaps++;
	}
	}
	}else{
	// NOTE(review): anything that is not a MAP lands here, so any
	// non-map, non-reduce task types in the history would be counted
	// as reduces -- confirm against the history record format.
	if( reduceStarted==0||reduceStarted > startTime ){
	reduceStarted = startTime ;
	}
	if( reduceFinished < finishTime ){
	reduceFinished = finishTime;
	}
	Map attempts = task.getTaskAttempts();
	for( TaskAttempt attempt : attempts.values() ) {
	totalReduces++;
	if( Values.FAILED.name().equals(attempt.get(Keys.TASK_STATUS)) ) {
	failedReduces++;
	}
	}
	}
	}
%>
<%
	// Re-parse the job's history file with a filter that collects FAILED
	// task attempts grouped by the hostname they ran on, then render a
	// "failed attempts per node" table (markup appears stripped here).
	DefaultJobHistoryParser.BadNodesFilter filter = new DefaultJobHistoryParser.BadNodesFilter();
	// History files live under <hadoop.log.dir>/history, named
	// "<jobTrackerId>_<jobid>".
	// NOTE(review): System.getProperty("hadoop.log.dir") may be null, which
	// would silently yield a "null/history" path -- confirm the property is
	// always set in this deployment.
	String dir = System.getProperty("hadoop.log.dir") + File.separator + "history" ;
	JobHistory.parseHistory(new File(dir, jobTrackerId+"_" + jobid), filter);
	// Hostname -> set of failed task ids.
	// NOTE(review): type parameters look stripped by extraction ("Map>" is
	// presumably Map<String, Set<String>>) -- confirm against original.
	Map> badNodes = filter.getValues();
	// Render the table only when at least one node had failures.
	if( badNodes.size() > 0 ) {
%>
Failed tasks attempts by nodes
Hostname | Failed Tasks |
<%
	for( String node : badNodes.keySet() ) {
	Set failedTasks = badNodes.get(node);
%>
<%=node %> |
<%
	for( String t : failedTasks ) {
%>
<%=t %>,
<%
	}
%>
 |
<%
	}
	}
%>