%@ page
contentType="text/html; charset=UTF-8"
import="javax.servlet.*"
import="javax.servlet.http.*"
import="java.io.*"
import="java.util.*"
import="org.apache.hadoop.dfs.*"
import="org.apache.hadoop.util.*"
import="java.text.DateFormat"
%>
<%!
// Singleton namesystem for this namenode; source of all cluster statistics.
FSNamesystem fsn = FSNamesystem.getFSNamesystem();
// "host:port" label rendered in the page title and heading below.
String namenodeLabel = fsn.getDFSNameNodeMachine() + ":" + fsn.getDFSNameNodePort();
// Snapshot of System.currentTimeMillis(), set once per report by
// generateDFSHealthReport and read by generateLiveNodeData so every
// row's "LastContact" age uses the same reference time.
long currentTime;
// Helper used to fetch the live/dead datanode lists (DFSNodesStatus).
JspHelper jspHelper = new JspHelper();
public void generateLiveNodeData(JspWriter out, DatanodeInfo d)
throws IOException {
long c = d.getCapacity();
long r = d.getRemaining();
long u = c - r;
String cGb = DFSShell.limitDecimal((1.0 * c)/(1024*1024*1024), 2);
String uGb = DFSShell.limitDecimal((1.0 * u)/(1024*1024*1024), 2);
String percentUsed;
if (c > 0)
percentUsed = DFSShell.limitDecimal(((1.0 * u)/c)*100, 2);
else
percentUsed = "100";
out.print("
" +
d.getName() +
" LastContact: " +
(currentTime - d.getLastUpdate())/1000 + " second(s) back; ");
out.print("Total raw bytes: " + c + "(" + cGb +
" GB); ");
out.print("Percent used: " + percentUsed);
out.print(" | ");
}
public void generateDFSHealthReport(JspWriter out) throws IOException {
Vector live = new Vector();
Vector dead = new Vector();
jspHelper.DFSNodesStatus(live, dead);
if (live.isEmpty() && dead.isEmpty()) {
out.print("There are no datanodes in the cluster");
}
else {
out.print("
Number of live data stores: " + live.size() +
", dead datanodes: " + dead.size() + "");
out.print("");
out.print("");
out.print("");
out.print("Live Data Stores
| ");
out.print("Dead Data Stores
| ");
out.print("
");
int i = 0;
int min = (live.size() > dead.size()) ? dead.size() : live.size();
int max = (live.size() > dead.size()) ? live.size() : dead.size();
currentTime = System.currentTimeMillis();
for (i = 0; i < min; i++) {
DatanodeInfo l = (DatanodeInfo)live.elementAt(i);
DatanodeInfo d = (DatanodeInfo)dead.elementAt(i);
out.print("");
generateLiveNodeData(out, l);
out.print("" +
d.getName() +
"
| ");
out.print("
");
}
int type = (live.size() > dead.size()) ? 1 : 0;
for (i = min; i < max; i++) {
out.print("");
if (type == 1) {
DatanodeInfo l = (DatanodeInfo)live.elementAt(i);
generateLiveNodeData(out, l);
out.print("
| ");
}
else if (type == 0) {
DatanodeInfo d = (DatanodeInfo)dead.elementAt(i);
out.print("
| ");
out.print("" +
d.getName() +
"
| ");
}
out.print("
");
}
out.print("
");
}
}
/**
 * Total raw DFS capacity formatted as "&lt;bytes&gt;(&lt;GB&gt; GB)".
 *
 * Fix: read the live counter once -- the original called
 * fsn.totalCapacity() twice, so the raw-bytes figure and the GB figure
 * could disagree if the value changed between the two reads.
 *
 * @return capacity in bytes followed by the same value in GB (2 decimals)
 */
public String totalCapacity() {
    long capacity = fsn.totalCapacity();
    return capacity + "(" + DFSShell.limitDecimal((1.0 * capacity)/(1024*1024*1024), 2) + " GB)";
}
/**
 * Remaining DFS capacity formatted as "&lt;bytes&gt;(&lt;GB&gt; GB)".
 *
 * Fixes: (1) the original divided two longs (fsn.totalRemaining() /
 * (1024*1024*1024)), truncating to whole GB before limitDecimal could add
 * its 2 decimal places -- promote to double first, matching
 * totalCapacity(); (2) read the live counter once so the bytes and GB
 * figures cannot disagree.
 *
 * @return remaining bytes followed by the same value in GB (2 decimals)
 */
public String totalRemaining() {
    long remaining = fsn.totalRemaining();
    return remaining + "(" + DFSShell.limitDecimal((1.0 * remaining)/(1024*1024*1024), 2) + " GB)";
}
%>
Hadoop NameNode <%=namenodeLabel%>
NameNode '<%=namenodeLabel%>'
Started: <%= fsn.getStartTime()%>
Version: <%= VersionInfo.getVersion()%>,
r<%= VersionInfo.getRevision()%>
Compiled: <%= VersionInfo.getDate()%> by
<%= VersionInfo.getUser()%>
Browse the filesystem
Cluster Summary
<%= jspHelper.getSafeModeText()%>
The capacity of this cluster is <%= totalCapacity()%> and remaining is <%= totalRemaining()%>.
<%
generateDFSHealthReport(out);
%>
Local logs
Log directory
Hadoop, 2006.