<%@ page contentType="text/html; charset=UTF-8" import="javax.servlet.*" import="javax.servlet.http.*" import="java.io.*" import="java.util.*" import="java.net.*" import="org.apache.hadoop.dfs.*" import="org.apache.hadoop.io.*" import="org.apache.hadoop.conf.*" import="java.text.DateFormat" %> <%! static JspHelper jspHelper = new JspHelper(); public void generateFileChunks(JspWriter out, HttpServletRequest req) throws IOException { long startOffset = 0; int chunkSizeToView = 0; String referrer = req.getParameter("referrer"); boolean noLink = false; if (referrer == null) { noLink = true; } String filename = req.getParameter("filename"); if (filename == null) { out.print("Invalid input (filename absent)"); return; } String chunkSizeToViewStr = req.getParameter("chunkSizeToView"); if (chunkSizeToViewStr != null && Integer.parseInt(chunkSizeToViewStr) > 0) chunkSizeToView = Integer.parseInt(chunkSizeToViewStr); else chunkSizeToView = jspHelper.defaultChunkSizeToView; if (!noLink) out.print("

" + filename + "

"); else out.print("

" + filename + "

"); out.print("Chunk Size to view (in bytes, upto file's DFS blocksize): "); out.print(""); out.print("  
"); out.print(""); if (!noLink) out.print(""); //fetch the block from the datanode that has the last block for this file DFSClient dfs = new DFSClient(jspHelper.nameNodeAddr, jspHelper.conf); LocatedBlock blocks[] = dfs.namenode.open(filename); if (blocks == null || blocks.length == 0) { out.print("No datanodes contain blocks of file "+filename); dfs.close(); return; } LocatedBlock lastBlk = blocks[blocks.length - 1]; long blockSize = lastBlk.getBlock().getNumBytes(); long blockId = lastBlk.getBlock().getBlockId(); DatanodeInfo chosenNode; try { chosenNode = jspHelper.bestNode(lastBlk); } catch (IOException e) { out.print(e.toString()); dfs.close(); return; } InetSocketAddress addr = DataNode.createSocketAddr(chosenNode.getName()); //view the last chunkSizeToView bytes while Tailing if (blockSize >= chunkSizeToView) startOffset = blockSize - chunkSizeToView; else startOffset = 0; out.print(""); dfs.close(); } %> Hadoop DFS File Viewer
<% generateFileChunks(out,request); %>

Local logs

Log directory
Hadoop, 2006.