<%@ page
  contentType="text/html; charset=UTF-8"
  import="javax.servlet.*"
  import="javax.servlet.http.*"
  import="java.io.*"
  import="java.util.*"
  import="org.apache.hadoop.fs.*"
  import="org.apache.hadoop.dfs.*"
  import="org.apache.hadoop.util.*"
  import="java.text.DateFormat"
  import="java.lang.Math"
  import="java.net.URLEncoder"
%>
<%!
  FSNamesystem fsn = FSNamesystem.getFSNamesystem();
  String namenodeLabel = fsn.getDFSNameNodeMachine() + ":" + fsn.getDFSNameNodePort();
  JspHelper jspHelper = new JspHelper();

  int rowNum = 0;
  int colNum = 0;

  // Alternating row/column markup for the summary tables.
  String rowTxt() { colNum = 0;
      return "<tr class=\"" + (((rowNum++)%2 == 0) ? "rowNormal" : "rowAlt")
          + "\"> "; }
  String colTxt() { return "<td id=\"col" + ++colNum + "\"> "; }
  void counterReset() { colNum = 0; rowNum = 0; }

  long diskBytes = 1024 * 1024 * 1024;
  String diskByteStr = "GB";

  String sorterField = null;
  String sorterOrder = null;

  // Builds the attributes for a sortable column header: clicking the header
  // reloads dfshealth.jsp sorted on that column, toggling ASC/DSC.
  String NodeHeaderStr(String name) {
      String ret = "class=header";
      String order = "ASC";
      if ( name.equals( sorterField ) ) {
          ret += sorterOrder;
          if ( sorterOrder.equals("ASC") )
              order = "DSC";
      }
      ret += " onClick=\"window.document.location=" +
          "'/dfshealth.jsp?sorter/field=" + name + "&sorter/order=" +
          order + "'\" title=\"sort on this column\"";

      return ret;
  }

  public void generateNodeData( JspWriter out, DatanodeDescriptor d,
                                String suffix, boolean alive )
    throws IOException {

    /* Say the datanode is dn1.hadoop.apache.org with ip 192.168.0.5.
       We use:
       1) d.getHostName():d.getPort() to display.
          Domain and port are stripped if they are common across the nodes,
          i.e. "dn1".
       2) d.getHost():d.getPort() for "title",
          i.e. "192.168.0.5:50010".
       3) d.getHostName():d.getInfoPort() for the url,
          i.e. "http://dn1.hadoop.apache.org:50075/...".
       Note that "d.getHost():d.getPort()" is what DFS clients use
       to interact with datanodes.
    */
    // from nn_browsedfscontent.jsp:
    String url = "http://" + d.getHostName() + ":" + d.getInfoPort() +
                 "/browseDirectory.jsp?namenodeInfoPort=" +
                 fsn.getNameNodeInfoPort() +
                 "&dir=" + URLEncoder.encode("/", "UTF-8");

    String name = d.getHostName() + ":" + d.getPort();
    if ( !name.matches( "\\d+\\.\\d+\\.\\d+\\.\\d+.*" ) )
        name = name.replaceAll( "\\.[^.:]*", "" );
    int idx = (suffix != null && name.endsWith( suffix )) ?
        name.indexOf( suffix ) : -1;

    out.print( rowTxt() + "<td class=\"name\"><a title=\"" +
               d.getHost() + ":" + d.getPort() +
               "\" href=\"" + url + "\">" +
               (( idx > 0 ) ? name.substring(0, idx) : name) + "</a>" +
               (( alive ) ? "" : "\n") );
    if ( !alive )
        return;

    long c = d.getCapacity();
    long u = d.getDfsUsed();

    String percentUsed;
    if (c > 0)
        percentUsed = FsShell.limitDecimalTo2(((1.0 * u)/c)*100);
    else
        percentUsed = "100";

    String adminState = (d.isDecommissioned() ? "Decommissioned" :
                         (d.isDecommissionInProgress() ? "Decommission In Progress" :
                          "In Service"));

    long timestamp = d.getLastUpdate();
    long currentTime = System.currentTimeMillis();
    out.print("<td class=\"lastcontact\"> " +
              ((currentTime - timestamp)/1000) +
              "<td class=\"adminstate\">" + adminState +
              "<td align=\"right\" class=\"size\">" +
              FsShell.limitDecimalTo2(c*1.0/diskBytes) +
              "<td align=\"right\" class=\"pcused\">" + percentUsed +
              "<td class=\"pcused\">" +
              JspHelper.percentageGraph( (int)Double.parseDouble(percentUsed), 100) +
              "<td align=\"right\" class=\"size\">" +
              FsShell.limitDecimalTo2(d.getRemaining()*1.0/diskBytes) +
              "<td align=\"right\" class=\"blocks\">" + d.numBlocks() + "\n");
  }

  public void generateDFSHealthReport(JspWriter out,
                                      HttpServletRequest request)
                                      throws IOException {
    ArrayList<DatanodeDescriptor> live = new ArrayList<DatanodeDescriptor>();
    ArrayList<DatanodeDescriptor> dead = new ArrayList<DatanodeDescriptor>();
    jspHelper.DFSNodesStatus(live, dead);

    sorterField = request.getParameter("sorter/field");
    sorterOrder = request.getParameter("sorter/order");
    if ( sorterField == null )
        sorterField = "name";
    if ( sorterOrder == null )
        sorterOrder = "ASC";

    jspHelper.sortNodeList(live, sorterField, sorterOrder);
    jspHelper.sortNodeList(dead, "name", "ASC");

    // Find out common suffix. Should this be before or after the sort?
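    // If all live nodes share the same ":port" suffix (e.g. every datanode
    // reports port 50010), that common suffix is dropped from the names
    // shown in the table below; otherwise names are displayed in full.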
    String port_suffix = null;
    if ( live.size() > 0 ) {
        String name = live.get(0).getName();
        int idx = name.indexOf(':');
        if ( idx > 0 ) {
            port_suffix = name.substring( idx );
        }

        for ( int i=1; port_suffix != null && i < live.size(); i++ ) {
            if ( live.get(i).getName().endsWith( port_suffix ) == false ) {
                port_suffix = null;
                break;
            }
        }
    }

    counterReset();

    out.print( "<div id=\"dfstable\"> <table>\n" +
               rowTxt() + colTxt() + "Capacity" + colTxt() + ":" + colTxt() +
               FsShell.byteDesc( fsn.getCapacityTotal() ) +
               rowTxt() + colTxt() + "DFS Remaining" + colTxt() + ":" + colTxt() +
               FsShell.byteDesc( fsn.getCapacityRemaining() ) +
               rowTxt() + colTxt() + "DFS Used" + colTxt() + ":" + colTxt() +
               FsShell.byteDesc( fsn.getCapacityUsed() ) +
               rowTxt() + colTxt() + "DFS Used%" + colTxt() + ":" + colTxt() +
               FsShell.limitDecimalTo2((fsn.getCapacityUsed())*100.0/
                                       (fsn.getCapacityTotal() + 1e-10)) + " %" +
               rowTxt() + colTxt() + "Live Nodes " + colTxt() + ":" + colTxt() + live.size() +
               rowTxt() + colTxt() + "Dead Nodes " + colTxt() + ":" + colTxt() + dead.size() +
               "</table></div><br>\n" );

    if (live.isEmpty() && dead.isEmpty()) {
        out.print("There are no datanodes in the cluster");
    }
    else {

        out.print( "<div id=\"dfsnodetable\"> " +
                   "<a name=\"LiveNodes\" id=\"title\">" +
                   "Live Datanodes : " + live.size() + "</a>" +
                   "<br><br>\n<table border=1 cellspacing=0>\n" );

        counterReset();

        if ( live.size() > 0 ) {

            // Report sizes in TB instead of GB for large nodes.
            if ( live.get(0).getCapacity() > 1024 * diskBytes ) {
                diskBytes *= 1024;
                diskByteStr = "TB";
            }

            out.print( "<tr class=\"headerRow\"> <th " +
                       NodeHeaderStr("name") + "> Node <th " +
                       NodeHeaderStr("lastcontact") + "> Last Contact <th " +
                       NodeHeaderStr("adminstate") + "> Admin State <th " +
                       NodeHeaderStr("size") + "> Size (" + diskByteStr + ") <th " +
                       NodeHeaderStr("pcused") + "> Used (%) <th " +
                       NodeHeaderStr("pcused") + "> Used (%) <th " +
                       NodeHeaderStr("remaining") + "> Remaining (" + diskByteStr + ") <th " +
                       NodeHeaderStr("blocks") + "> Blocks\n" );

            for ( int i=0; i < live.size(); i++ ) {
                generateNodeData( out, live.get(i), port_suffix, true );
            }
        }
        out.print("</table>\n");

        counterReset();

        out.print("<br> <a name=\"DeadNodes\" id=\"title\"> " +
                  " Dead Datanodes : " + dead.size() + "</a><br><br>\n");

        if ( dead.size() > 0 ) {
            out.print( "<table border=1 cellspacing=0> <tr id=\"row1\"> " +
                       "<td> Node \n" );

            for ( int i=0; i < dead.size() ; i++ ) {
                generateNodeData( out, dead.get(i), port_suffix, false );
            }

            out.print("</table>\n");
        }
        out.print("</div>");
    }
  }
%>

<html>

<link rel="stylesheet" type="text/css" href="/static/hadoop.css">
<title>Hadoop NameNode <%=namenodeLabel%></title>

<body>

<h1>NameNode '<%=namenodeLabel%>'</h1>

<div id="dfstable"> <table>
<tr> <td id="col1"> Started: <td> <%= fsn.getStartTime()%>
<tr> <td id="col1"> Version: <td> <%= VersionInfo.getVersion()%>, r<%= VersionInfo.getRevision()%>
<tr> <td id="col1"> Compiled: <td> <%= VersionInfo.getDate()%> by <%= VersionInfo.getUser()%>
<tr> <td id="col1"> Upgrades: <td> <%= jspHelper.getUpgradeStatusText()%>
</table></div><br>

<b><a href="/nn_browsedfscontent.jsp">Browse the filesystem</a></b>
<hr>

<h3>Cluster Summary</h3>

<b> <%= jspHelper.getSafeModeText()%> </b>
<b> <%= jspHelper.getInodeLimitText()%> </b>
<%
    generateDFSHealthReport(out, request);
%>

<hr>
<h3>Local logs</h3>

<a href="/logs/">Log</a> directory

<%
out.println(ServletUtil.htmlFooter());
%>