<%@ page
contentType="text/html; charset=UTF-8"
import="javax.servlet.http.*"
import="java.io.*"
import="java.util.*"
import="org.apache.hadoop.mapred.*"
import="org.apache.hadoop.util.*"
import="java.text.SimpleDateFormat"
import="org.apache.hadoop.mapred.JobHistory.*"
%>
<jsp:include page="loadhistory.jsp">
  <jsp:param name="jobid" value="<%=request.getParameter("jobid") %>"/>
  <jsp:param name="jobTrackerId" value="<%=request.getParameter("jobTrackerId") %>"/>
</jsp:include>
<%! // NOTE(review): SimpleDateFormat is not thread-safe, and this static instance is
    // shared by every concurrent request to this JSP — simultaneous formatting calls
    // can corrupt each other's output. Consider a per-request instance or ThreadLocal.
    static SimpleDateFormat dateFormat = new SimpleDateFormat("d-MMM-yyyy HH:mm:ss") ; %>
<%
	// Job id and job-tracker id come straight from the query string. They are later
	// concatenated into a history-file name (new File(dir, jobTrackerId + "_" + jobid)),
	// so NOTE(review): unvalidated values allow path traversal — sanitize before use.
	String jobid = request.getParameter("jobid");
	String jobTrackerId = request.getParameter("jobTrackerId");
	// "job" is expected to have been stored in the session by the loadhistory include;
	// NOTE(review): hitting this page directly leaves it null and the page NPEs below.
	JobInfo job = (JobInfo)request.getSession().getAttribute("job");
%>
<%-- Job summary header. NOTE(review): all values are emitted unescaped; jobid is
     request-controlled, making this an XSS vector — HTML-escape before rendering.
     Times are formatted with the elapsed diff against the preceding milestone;
     a null JOB_STATUS is shown as "Incomplete". --%>
Hadoop Job <%=jobid %>
User: <%=job.get(Keys.USER) %>
JobName: <%=job.get(Keys.JOBNAME) %>
JobConf:
<%=job.get(Keys.JOBCONF) %>
Submitted At: <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLong(Keys.SUBMIT_TIME), 0 ) %>
Launched At: <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLong(Keys.LAUNCH_TIME), job.getLong(Keys.SUBMIT_TIME)) %>
Finished At: <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLong(Keys.FINISH_TIME), job.getLong(Keys.LAUNCH_TIME)) %>
Status: <%= ((job.get(Keys.JOB_STATUS) == null)?"Incomplete" :job.get(Keys.JOB_STATUS)) %>
<%
	/*
	 * Walk every task (and each of its attempts) in the job history to compute:
	 *  - total / failed / killed attempt counts for the map and reduce phases
	 *  - earliest start and latest finish time of each phase (task-level times)
	 *  - the set of distinct hosts that ran at least one task
	 *
	 * Fix: generics restored on the Map declarations — the raw types made
	 * tasks.values()/attempts.values() raw Collections, so the typed for-each
	 * loops below did not compile.
	 */
	Map<String, JobHistory.Task> tasks = job.getAllTasks();
	int totalMaps = 0;
	int totalReduces = 0;
	int failedMaps = 0;
	int killedMaps = 0;
	int failedReduces = 0;
	int killedReduces = 0;
	long mapStarted = 0;
	long mapFinished = 0;
	long reduceStarted = 0;
	long reduceFinished = 0;
	// Distinct hostnames, kept sorted; values are unused (null).
	Map<String, String> allHosts = new TreeMap<String, String>();
	for (JobHistory.Task task : tasks.values()) {
	  long startTime = task.getLong(Keys.START_TIME);
	  long finishTime = task.getLong(Keys.FINISH_TIME);
	  allHosts.put(task.get(Keys.HOSTNAME), null);
	  if (Values.MAP.name().equals(task.get(Keys.TASK_TYPE))) {
	    // Track earliest map start (0 means "not seen yet") and latest map finish.
	    if (mapStarted == 0 || mapStarted > startTime) {
	      mapStarted = startTime;
	    }
	    if (mapFinished < finishTime) {
	      mapFinished = finishTime;
	    }
	    Map<String, TaskAttempt> attempts = task.getTaskAttempts();
	    for (TaskAttempt attempt : attempts.values()) {
	      totalMaps++;
	      if (Values.FAILED.name().equals(attempt.get(Keys.TASK_STATUS))) {
	        failedMaps++;
	      }
	      if (Values.KILLED.name().equals(attempt.get(Keys.TASK_STATUS))) {
	        killedMaps++;
	      }
	    }
	  } else {
	    // Any non-map task is counted in the reduce phase.
	    if (reduceStarted == 0 || reduceStarted > startTime) {
	      reduceStarted = startTime;
	    }
	    if (reduceFinished < finishTime) {
	      reduceFinished = finishTime;
	    }
	    Map<String, TaskAttempt> attempts = task.getTaskAttempts();
	    for (TaskAttempt attempt : attempts.values()) {
	      totalReduces++;
	      if (Values.FAILED.name().equals(attempt.get(Keys.TASK_STATUS))) {
	        failedReduces++;
	      }
	      if (Values.KILLED.name().equals(attempt.get(Keys.TASK_STATUS))) {
	        killedReduces++;
	      }
	    }
	  }
	}
%>
Number of nodes used: <%=allHosts.size() %>
Analyse This Job
<%
	// Re-parse the job's history file with a filter that collects, per node,
	// the task attempts that FAILED on it. Generics restored on badNodes so the
	// entrySet() iteration below compiles (raw Map yields Object elements).
	// NOTE(review): jobTrackerId/jobid are request-controlled — validate before
	// building a file name from them (path traversal).
	DefaultJobHistoryParser.FailedOnNodesFilter filter =
	  new DefaultJobHistoryParser.FailedOnNodesFilter();
	String dir = System.getProperty("hadoop.log.dir") + File.separator + "history";
	JobHistory.parseHistory(new File(dir, jobTrackerId + "_" + jobid), filter);
	// hostname -> ids of task attempts that failed on that host
	Map<String, Set<String>> badNodes = filter.getValues();
	if (badNodes.size() > 0) {
%>
Failed tasks attempts by nodes
Hostname | Failed Tasks |
<%
	for (Map.Entry<String, Set<String>> entry : badNodes.entrySet()) {
	  String node = entry.getKey();
	  Set<String> failedTasks = entry.getValue();
%>
<%=node %> |
<%
	  for (String t : failedTasks) {
%>
<%=t %>,
<%
	  }
%>
|
<%
	}
}
%>
<%
	// Same pass as the failed-tasks section above, but collecting task attempts
	// that were KILLED per node.
	// BUG FIX: parseHistory was previously given `filter` (the failed-nodes
	// filter) instead of `killedFilter`, so killedFilter.getValues() was always
	// empty and this section never rendered.
	DefaultJobHistoryParser.KilledOnNodesFilter killedFilter =
	  new DefaultJobHistoryParser.KilledOnNodesFilter();
	dir = System.getProperty("hadoop.log.dir") + File.separator + "history";
	JobHistory.parseHistory(new File(dir, jobTrackerId + "_" + jobid), killedFilter);
	// hostname -> ids of task attempts killed on that host (fresh, properly
	// typed local; generics restored so the entrySet() iteration compiles)
	Map<String, Set<String>> killedNodes = killedFilter.getValues();
	if (killedNodes.size() > 0) {
%>
Killed tasks attempts by nodes
Hostname | Killed Tasks |
<%
	for (Map.Entry<String, Set<String>> entry : killedNodes.entrySet()) {
	  String node = entry.getKey();
	  Set<String> killedTasks = entry.getValue();
%>
<%=node %> |
<%
	  for (String t : killedTasks) {
%>
<%=t %>,
<%
	  }
%>
|
<%
	}
}
%>