# CPD Results
This report contains the results of PMD's Copy/Paste Detector (CPD) 4.2.5. Each entry below lists the pair of files that share a duplicated block, the line at which the block starts in each file, and the duplicated fragment itself.
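For readers who want to regenerate or post-process a report like this, the sketch below shows roughly how CPD can be driven from Java. It is a minimal sketch assuming the PMD 4.x-era `net.sourceforge.pmd.cpd` API (`CPD`, `JavaLanguage`, `Match`, `TokenEntry`); the class and method names changed in later PMD lines, so treat every call here as an assumption to verify against your PMD version.

```java
import java.util.Iterator;
import net.sourceforge.pmd.cpd.CPD;
import net.sourceforge.pmd.cpd.JavaLanguage;
import net.sourceforge.pmd.cpd.Match;
import net.sourceforge.pmd.cpd.TokenEntry;

public class CpdReportSketch {
  public static void main(String[] args) throws Exception {
    // 100 tokens is a conventional minimum duplicate size.
    CPD cpd = new CPD(100, new JavaLanguage());
    cpd.addRecursively("src/java/org/apache/hadoop/chukwa");
    cpd.go();
    // Each Match corresponds to one file/line table plus one code
    // fragment in the report below. Raw iterators and casts are used
    // deliberately, since the old API may not be generified.
    for (Iterator matches = cpd.getMatches(); matches.hasNext();) {
      Match match = (Match) matches.next();
      System.out.println("Found a " + match.getLineCount()
          + "-line duplication:");
      for (Iterator marks = match.iterator(); marks.hasNext();) {
        TokenEntry mark = (TokenEntry) marks.next();
        System.out.println("  " + mark.getTokenSrcID() + " | "
            + mark.getBeginLine());
      }
    }
  }
}
```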
## Duplications
File | Line |
---|---|
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobLogHistoryProcessor.java | 104 |
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/Log4jJobHistoryProcessor.java | 104 |
keys.put("JOBID", "job" + jobId.substring(idx1, idx2)); // log.info("JobLogHistoryProcessor Add field: [JOBID][" // + keys.get("JOBID") + "]"); } // if (keys.get("RECORD_TYPE").equalsIgnoreCase("Job") && // keys.containsKey("SUBMIT_TIME")) // { // // Job JOBID="job_200804210403_0005" JOBNAME="MY_JOB" // USER="userxxx" // // SUBMIT_TIME="1208760436751" // JOBCONF="/mapredsystem/xxx.yyy.com/job_200804210403_0005/job.xml" // // // } // else if (keys.get("RECORD_TYPE").equalsIgnoreCase("Job") && // keys.containsKey("LAUNCH_TIME")) // { // // Job JOBID="job_200804210403_0005" LAUNCH_TIME="1208760437110" // TOTAL_MAPS="5912" TOTAL_REDUCES="739" // // } // else if (keys.get("RECORD_TYPE").equalsIgnoreCase("Job") && // keys.containsKey("FINISH_TIME")) // { // // Job JOBID="job_200804210403_0005" FINISH_TIME="1208760906816" // JOB_STATUS="SUCCESS" FINISHED_MAPS="5912" FINISHED_REDUCES="739" // FAILED_MAPS="0" FAILED_REDUCES="0" // // COUNTERS="File Systems.Local bytes read:1735053407244,File // Systems.Local bytes written:2610106384012,File Systems.HDFS bytes // read:801605644910,File Systems.HDFS bytes written:44135800, // // Job Counters .Launched map tasks:5912,Job Counters .Launched // reduce tasks:739,Job Counters .Data-local map tasks:5573,Job // Counters .Rack-local map tasks:316,Map-Reduce Framework. // // Map input records:9410696067,Map-Reduce Framework.Map output // records:9410696067,Map-Reduce Framework.Map input // bytes:801599188816,Map-Reduce Framework.Map output // bytes:784427968116, // // Map-Reduce Framework.Combine input records:0,Map-Reduce // Framework.Combine output records:0,Map-Reduce Framework.Reduce // input groups:477265,Map-Reduce Framework.Reduce input // records:739000, // // Map-Reduce Framework.Reduce output records:739000" // // } // else if (keys.get("RECORD_TYPE").equalsIgnoreCase("MapAttempt") && keys.containsKey("START_TIME")) { // MapAttempt TASK_TYPE="MAP" // TASKID="tip_200804210403_0005_m_000018" // TASK_ATTEMPT_ID="task_200804210403_0005_m_000018_0" // START_TIME="1208760437531" // HOSTNAME="tracker_xxx.yyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:53734" key = new ChukwaRecordKey(); key.setKey("JobLogHist/Map/" + keys.get("JOBID") + "/" + keys.get("START_TIME")); key.setReduceType("JobLogHistoryReduceProcessor"); record = new ChukwaRecord(); record.setTime(Long.parseLong(keys.get("START_TIME"))); record.add("JOBID", keys.get("JOBID")); record.add("START_TIME", keys.get("START_TIME")); record.add(Record.tagsField, chunk.getTags()); // log.info("JobLogHist/Map/S"); output.collect(key, record); } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("MapAttempt") && keys.containsKey("FINISH_TIME")) { // MapAttempt TASK_TYPE="MAP" // TASKID="tip_200804210403_0005_m_005494" // TASK_ATTEMPT_ID="task_200804210403_0005_m_005494_0" // TASK_STATUS="SUCCESS" // FINISH_TIME="1208760624124" // HOSTNAME="tracker_xxxx.yyyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:55491" key = new ChukwaRecordKey(); key.setKey("JobLogHist/Map/" + keys.get("JOBID") + "/" + keys.get("FINISH_TIME")); key.setReduceType("JobLogHistoryReduceProcessor"); record = new ChukwaRecord(); record.setTime(Long.parseLong(keys.get("FINISH_TIME"))); record.add("JOBID", keys.get("JOBID")); record.add("FINISH_TIME", keys.get("FINISH_TIME")); record.add(Record.tagsField, chunk.getTags()); // log.info("JobLogHist/Map/E"); output.collect(key, record); } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("ReduceAttempt") && keys.containsKey("START_TIME")) { // ReduceAttempt TASK_TYPE="REDUCE" // 
TASKID="tip_200804210403_0005_r_000138" // TASK_ATTEMPT_ID="task_200804210403_0005_r_000138_0" // START_TIME="1208760454885" // HOSTNAME="tracker_xxxx.yyyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:51947" key = new ChukwaRecordKey(); key.setKey("JobLogHist/SHUFFLE/" + keys.get("JOBID") + "/" + keys.get("START_TIME")); key.setReduceType("JobLogHistoryReduceProcessor"); record = new ChukwaRecord(); record.setTime(Long.parseLong(keys.get("START_TIME"))); record.add("JOBID", keys.get("JOBID")); record.add("START_TIME", keys.get("START_TIME")); record.add(Record.tagsField, chunk.getTags()); // log.info("JobLogHist/SHUFFLE/S"); output.collect(key, record); } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("ReduceAttempt") && keys.containsKey("FINISH_TIME")) { // ReduceAttempt TASK_TYPE="REDUCE" // TASKID="tip_200804210403_0005_r_000138" // TASK_ATTEMPT_ID="task_200804210403_0005_r_000138_0" // TASK_STATUS="SUCCESS" SHUFFLE_FINISHED="1208760787167" // SORT_FINISHED="1208760787354" FINISH_TIME="1208760802395" // HOSTNAME="tracker__xxxx.yyyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:51947" key = new ChukwaRecordKey(); key.setKey("JobLogHist/SHUFFLE/" + keys.get("JOBID") + "/" + keys.get("SHUFFLE_FINISHED")); key.setReduceType("JobLogHistoryReduceProcessor"); record = new ChukwaRecord(); record.setTime(Long.parseLong(keys.get("SHUFFLE_FINISHED"))); record.add("JOBID", keys.get("JOBID")); record.add("SHUFFLE_FINISHED", keys.get("SHUFFLE_FINISHED")); record.add(Record.tagsField, chunk.getTags()); // log.info("JobLogHist/SHUFFLE/E"); output.collect(key, record); // SORT key = new ChukwaRecordKey(); key.setKey("JobLogHist/SORT/" + keys.get("JOBID") + "/" + keys.get("SHUFFLE_FINISHED")); key.setReduceType("JobLogHistoryReduceProcessor"); record = new ChukwaRecord(); record.setTime(Long.parseLong(keys.get("SHUFFLE_FINISHED"))); record.add("JOBID", keys.get("JOBID")); record.add("START_TIME", keys.get("SHUFFLE_FINISHED")); record.add(Record.tagsField, chunk.getTags()); // log.info("JobLogHist/SORT/S"); output.collect(key, record); key = new ChukwaRecordKey(); key.setKey("JobLogHist/SORT/" + keys.get("JOBID") + "/" + keys.get("SORT_FINISHED")); key.setReduceType("JobLogHistoryReduceProcessor"); record = new ChukwaRecord(); record.setTime(Long.parseLong(keys.get("SORT_FINISHED"))); record.add("JOBID", keys.get("JOBID")); record.add("SORT_FINISHED", keys.get("SORT_FINISHED")); record.add(Record.tagsField, chunk.getTags()); // log.info("JobLogHist/SORT/E"); output.collect(key, record); // Reduce key = new ChukwaRecordKey(); key.setKey("JobLogHist/REDUCE/" + keys.get("JOBID") + "/" + keys.get("SORT_FINISHED")); key.setReduceType("JobLogHistoryReduceProcessor"); record = new ChukwaRecord(); record.setTime(Long.parseLong(keys.get("SORT_FINISHED"))); record.add("JOBID", keys.get("JOBID")); record.add("START_TIME", keys.get("SORT_FINISHED")); record.add(Record.tagsField, chunk.getTags()); // log.info("JobLogHist/REDUCE/S"); output.collect(key, record); key = new ChukwaRecordKey(); key.setKey("JobLogHist/REDUCE/" + keys.get("JOBID") + "/" + keys.get("FINISH_TIME")); key.setReduceType("JobLogHistoryReduceProcessor"); record = new ChukwaRecord(); record.setTime(Long.parseLong(keys.get("SORT_FINISHED"))); record.add("JOBID", keys.get("JOBID")); record.add("FINISH_TIME", keys.get("SORT_FINISHED")); record.add(Record.tagsField, chunk.getTags()); // log.info("JobLogHist/REDUCE/E"); output.collect(key, record); } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("Job") |
File | Line |
---|---|
org/apache/hadoop/chukwa/hicc/OfflineTimeHandler.java | 112 |
org/apache/hadoop/chukwa/hicc/TimeHandler.java | 174 |
```java
  }
  SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm");
  SimpleDateFormat formatDate = new SimpleDateFormat("yyyy-MM-dd");
  SimpleDateFormat formatHour = new SimpleDateFormat("HH");
  SimpleDateFormat formatMin = new SimpleDateFormat("mm");
  formatter.setTimeZone(this.tz);
  formatDate.setTimeZone(this.tz);
  formatHour.setTimeZone(this.tz);
  formatMin.setTimeZone(this.tz);
  startS = formatter.format(start);
  this.startDate = formatDate.format(start);
  this.startHour = formatHour.format(start);
  this.startMin = formatMin.format(start);
  endS = formatter.format(end);
  this.endDate = formatDate.format(end);
  this.endHour = formatHour.format(end);
  this.endMin = formatMin.format(end);
}

public String getStartDate(String format) {
  SimpleDateFormat formatter = new SimpleDateFormat(format);
  formatter.setTimeZone(this.tz);
  return formatter.format(this.start);
}

public String getStartDate() {
  return this.startDate;
}

public String getStartHour() {
  return this.startHour;
}

public String getStartMinute() {
  return this.startMin;
}

public String getStartTimeText() {
  return this.startS;
}

public long getStartTime() {
  return start;
}

public String getEndDate(String format) {
  SimpleDateFormat formatter = new SimpleDateFormat(format);
  formatter.setTimeZone(this.tz);
  return formatter.format(this.end);
}

public String getEndDate() {
  return this.endDate;
}

public String getEndHour() {
  return this.endHour;
}

public String getEndMinute() {
  return this.endMin;
}

public String getEndTimeText() {
  return this.endS;
}

public long getEndTime() {
  return end;
}
}
```
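OfflineTimeHandler and TimeHandler duplicate both the formatter setup and the entire block of start/end getters. A minimal sketch of one way to retire the clone, assuming both classes keep millisecond `start`/`end` values and a `tz` time zone: a small shared value object (the `TimeRange` name is hypothetical, not a Chukwa class) that both handlers could delegate their getters to.

```java
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;

// Hypothetical helper, not part of Chukwa: both TimeHandler and
// OfflineTimeHandler could hold one of these instead of duplicating
// eight fields and sixteen near-identical getters.
class TimeRange {
  private final long start;
  private final long end;
  private final TimeZone tz;

  TimeRange(long start, long end, TimeZone tz) {
    this.start = start;
    this.end = end;
    this.tz = tz;
  }

  // One parameterized formatter replaces the four duplicated
  // SimpleDateFormat fields ("yyyy-MM-dd HH:mm", "yyyy-MM-dd", "HH", "mm").
  private String format(long time, String pattern) {
    SimpleDateFormat formatter = new SimpleDateFormat(pattern);
    formatter.setTimeZone(tz);
    return formatter.format(new Date(time));
  }

  long getStartTime() { return start; }
  long getEndTime() { return end; }
  String getStartDate(String pattern) { return format(start, pattern); }
  String getEndDate(String pattern) { return format(end, pattern); }
  String getStartDate() { return format(start, "yyyy-MM-dd"); }
  String getStartHour() { return format(start, "HH"); }
  String getStartMinute() { return format(start, "mm"); }
  String getEndDate() { return format(end, "yyyy-MM-dd"); }
  String getEndHour() { return format(end, "HH"); }
  String getEndMinute() { return format(end, "mm"); }
}
```

Because each getter differs only in its pattern string, the duplication collapses into a single `format` call per accessor.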
File | Line |
---|---|
org/apache/hadoop/chukwa/ChukwaArchiveKey.java | 311 |
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java | 232 |
```java
{
  int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
  int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
  int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
  int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
  s1 += z1;
  s2 += z2;
  l1 -= z1;
  l2 -= z2;
  int r1 = org.apache.hadoop.record.Utils.compareBytes(b1, s1, i1, b2, s2, i2);
  if (r1 != 0) {
    return (r1 < 0) ? -1 : 0;
  }
  s1 += i1;
  s2 += i2;
  l1 -= i1;
  l1 -= i2;
}
{
  int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
  int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
  int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
  int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
  s1 += z1;
  s2 += z2;
  l1 -= z1;
  l2 -= z2;
  int r1 = org.apache.hadoop.record.Utils.compareBytes(b1, s1, i1, b2, s2, i2);
  if (r1 != 0) {
    return (r1 < 0) ? -1 : 0;
  }
  s1 += i1;
  s2 += i2;
  l1 -= i1;
  l1 -= i2;
}
```
File | Line |
---|---|
org/apache/hadoop/chukwa/ChukwaArchiveKey.java | 53 |
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java | 49 |
```java
}

public static org.apache.hadoop.record.meta.RecordTypeInfo getTypeInfo() {
  return _rio_recTypeInfo;
}

public static void setTypeFilter(
    org.apache.hadoop.record.meta.RecordTypeInfo rti) {
  if (null == rti)
    return;
  _rio_rtiFilter = rti;
  _rio_rtiFilterFields = null;
}

private static void setupRtiFields() {
  if (null == _rio_rtiFilter)
    return;
  // we may already have done this
  if (null != _rio_rtiFilterFields)
    return;
  int _rio_i, _rio_j;
  _rio_rtiFilterFields = new int[_rio_rtiFilter.getFieldTypeInfos().size()];
  for (_rio_i = 0; _rio_i < _rio_rtiFilterFields.length; _rio_i++) {
    _rio_rtiFilterFields[_rio_i] = 0;
  }
  java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_itFilter = _rio_rtiFilter
      .getFieldTypeInfos().iterator();
  _rio_i = 0;
  while (_rio_itFilter.hasNext()) {
    org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfoFilter = _rio_itFilter
        .next();
    java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_it = _rio_recTypeInfo
        .getFieldTypeInfos().iterator();
    _rio_j = 1;
    while (_rio_it.hasNext()) {
      org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfo = _rio_it.next();
      if (_rio_tInfo.equals(_rio_tInfoFilter)) {
        _rio_rtiFilterFields[_rio_i] = _rio_j;
        break;
      }
      _rio_j++;
    }
    _rio_i++;
  }
}

public long getTime() {
```
File | Line |
---|---|
org/apache/hadoop/chukwa/ChukwaArchiveKey.java | 53 |
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java | 44 |
```java
}

public static org.apache.hadoop.record.meta.RecordTypeInfo getTypeInfo() {
  return _rio_recTypeInfo;
}

public static void setTypeFilter(
    org.apache.hadoop.record.meta.RecordTypeInfo rti) {
  if (null == rti)
    return;
  _rio_rtiFilter = rti;
  _rio_rtiFilterFields = null;
}

private static void setupRtiFields() {
  if (null == _rio_rtiFilter)
    return;
  // we may already have done this
  if (null != _rio_rtiFilterFields)
    return;
  int _rio_i, _rio_j;
  _rio_rtiFilterFields = new int[_rio_rtiFilter.getFieldTypeInfos().size()];
  for (_rio_i = 0; _rio_i < _rio_rtiFilterFields.length; _rio_i++) {
    _rio_rtiFilterFields[_rio_i] = 0;
  }
  java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_itFilter = _rio_rtiFilter
      .getFieldTypeInfos().iterator();
  _rio_i = 0;
  while (_rio_itFilter.hasNext()) {
    org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfoFilter = _rio_itFilter
        .next();
    java.util.Iterator<org.apache.hadoop.record.meta.FieldTypeInfo> _rio_it = _rio_recTypeInfo
        .getFieldTypeInfos().iterator();
    _rio_j = 1;
    while (_rio_it.hasNext()) {
      org.apache.hadoop.record.meta.FieldTypeInfo _rio_tInfo = _rio_it.next();
      if (_rio_tInfo.equals(_rio_tInfoFilter)) {
        _rio_rtiFilterFields[_rio_i] = _rio_j;
        break;
      }
      _rio_j++;
    }
    _rio_i++;
  }
}

public String getReduceType() {
```
File | Line |
---|---|
org/apache/hadoop/chukwa/hicc/OfflineTimeHandler.java | 92 |
org/apache/hadoop/chukwa/hicc/TimeHandler.java | 102 |
```java
Calendar now = Calendar.getInstance();
this.start = now.getTimeInMillis();
this.end = now.getTimeInMillis();
if (period.equals("last1hr")) {
  start = end - (60 * 60 * 1000);
} else if (period.equals("last2hr")) {
  start = end - (2 * 60 * 60 * 1000);
} else if (period.equals("last3hr")) {
  start = end - (3 * 60 * 60 * 1000);
} else if (period.equals("last6hr")) {
  start = end - (6 * 60 * 60 * 1000);
} else if (period.equals("last12hr")) {
  start = end - (12 * 60 * 60 * 1000);
} else if (period.equals("last24hr")) {
  start = end - (24 * 60 * 60 * 1000);
} else if (period.equals("last7d")) {
  start = end - (7 * 24 * 60 * 60 * 1000);
} else if (period.equals("last30d")) {
  start = end - (30L * 24 * 60 * 60 * 1000);
} else if (period.startsWith("custom;")) {
```
File | Line |
---|---|
org/apache/hadoop/chukwa/hicc/OfflineTimeHandler.java | 62 |
org/apache/hadoop/chukwa/hicc/OfflineTimeHandler.java | 91 |
&& map.get("period") != null) { String period = map.get("period"); this.start = now.getTimeInMillis(); this.end = now.getTimeInMillis(); if (period.equals("last1hr")) { start = end - (60 * 60 * 1000); } else if (period.equals("last2hr")) { start = end - (2 * 60 * 60 * 1000); } else if (period.equals("last3hr")) { start = end - (3 * 60 * 60 * 1000); } else if (period.equals("last6hr")) { start = end - (6 * 60 * 60 * 1000); } else if (period.equals("last12hr")) { start = end - (12 * 60 * 60 * 1000); } else if (period.equals("last24hr")) { start = end - (24 * 60 * 60 * 1000); } else if (period.equals("last7d")) { start = end - (7 * 24 * 60 * 60 * 1000); } else if (period.equals("last30d")) { start = end - (30L * 24 * 60 * 60 * 1000); |
File | Line |
---|---|
org/apache/hadoop/chukwa/hicc/OfflineTimeHandler.java | 63 |
org/apache/hadoop/chukwa/hicc/TimeHandler.java | 102 |
```java
Calendar now = Calendar.getInstance();
this.start = now.getTimeInMillis();
this.end = now.getTimeInMillis();
if (period.equals("last1hr")) {
  start = end - (60 * 60 * 1000);
} else if (period.equals("last2hr")) {
  start = end - (2 * 60 * 60 * 1000);
} else if (period.equals("last3hr")) {
  start = end - (3 * 60 * 60 * 1000);
} else if (period.equals("last6hr")) {
  start = end - (6 * 60 * 60 * 1000);
} else if (period.equals("last12hr")) {
  start = end - (12 * 60 * 60 * 1000);
} else if (period.equals("last24hr")) {
  start = end - (24 * 60 * 60 * 1000);
} else if (period.equals("last7d")) {
  start = end - (7 * 24 * 60 * 60 * 1000);
} else if (period.equals("last30d")) {
  start = end - (30L * 24 * 60 * 60 * 1000);
```
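This `lastNhr`/`lastNd` ladder is the most-repeated fragment in the report: it appears twice within OfflineTimeHandler and once more in TimeHandler. A table-driven replacement is a natural fix; the sketch below is illustrative only (the `PeriodParser` name is ours, not Chukwa's).

```java
import java.util.HashMap;
import java.util.Map;

// Hypothetical refactoring sketch: one shared lookup table replaces the
// duplicated if/else ladder in TimeHandler and OfflineTimeHandler.
class PeriodParser {
  private static final Map<String, Long> PERIODS = new HashMap<String, Long>();
  static {
    long hour = 60L * 60 * 1000;
    long day = 24 * hour;
    PERIODS.put("last1hr", hour);
    PERIODS.put("last2hr", 2 * hour);
    PERIODS.put("last3hr", 3 * hour);
    PERIODS.put("last6hr", 6 * hour);
    PERIODS.put("last12hr", 12 * hour);
    PERIODS.put("last24hr", day);
    PERIODS.put("last7d", 7 * day);
    PERIODS.put("last30d", 30 * day);
  }

  /**
   * Returns the start of the window ending at {@code end}, or {@code end}
   * itself when the period string is unknown.
   */
  static long startFor(String period, long end) {
    Long span = PERIODS.get(period);
    return (span == null) ? end : end - span;
  }
}
```

Computing the spans in `long` arithmetic throughout also sidesteps the int-overflow trap that forces the lone `30L` literal in the original ladder.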
File | Line |
---|---|
org/apache/hadoop/chukwa/ChukwaArchiveKey.java | 287 |
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java | 294 |
```java
    }
    return (os - s);
  } catch (java.io.IOException e) {
    throw new RuntimeException(e);
  }
}

static public int compareRaw(byte[] b1, int s1, int l1, byte[] b2, int s2,
    int l2) {
  try {
    int os1 = s1;
    {
      long i1 = org.apache.hadoop.record.Utils.readVLong(b1, s1);
      long i2 = org.apache.hadoop.record.Utils.readVLong(b2, s2);
      if (i1 != i2) {
        return ((i1 - i2) < 0) ? -1 : 0;
      }
      int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
      int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
      s1 += z1;
      s2 += z2;
      l1 -= z1;
      l2 -= z2;
    }
    {
      int mi11 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
```
File | Line |
---|---|
org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java | 670 |
org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java | 710 |
} else if (this.query_stat_type.equals("total_volume")) { for(int i=0;i<events.size();i++) { HashMap<String, Object> event = events.get(i); start=(Long)event.get("start_time"); end=(Long)event.get("finish_time"); start_millis = Integer.parseInt(((String)event.get("start_time_millis"))); end_millis = Integer.parseInt(((String)event.get("finish_time_millis"))); String this_host = (String) event.get("hostname"); String other_host = (String) event.get("other_host"); int this_host_idx = host_indices.get(this_host).intValue(); int other_host_idx = host_indices.get(other_host).intValue(); long curr_val = Long.parseLong((String)event.get("bytes")); // to, from stats[other_host_idx][this_host_idx] += curr_val; |
File | Line |
---|---|
org/apache/hadoop/chukwa/analysis/salsa/fsm/DataNodeClientTraceMapper.java | 198 |
org/apache/hadoop/chukwa/analysis/salsa/fsm/TaskTrackerClientTraceMapper.java | 184 |
```java
    end_rec.generateUniqueID();
    start_rec.add_info.put(Record.tagsField, val.getValue(Record.tagsField));
    start_rec.add_info.put("csource", val.getValue("csource"));
    end_rec.add_info.put(Record.tagsField, val.getValue(Record.tagsField));
    end_rec.add_info.put("csource", val.getValue("csource"));
    end_rec.add_info.put("STATE_STRING", new String("SUCCESS")); // by default

    // add counter value
    end_rec.add_info.put("BYTES", val.getValue("bytes"));

    String crk_mid_string_start = new String(start_rec.getUniqueID() + "_"
        + start_rec.timestamp);
    String crk_mid_string_end = new String(end_rec.getUniqueID() + "_"
        + start_rec.timestamp);
    output.collect(new ChukwaRecordKey(FSM_CRK_ReduceType,
        crk_mid_string_start), start_rec);
    output.collect(new ChukwaRecordKey(FSM_CRK_ReduceType,
        crk_mid_string_end), end_rec);
  }
} // end of mapper class
```
File | Line |
---|---|
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java | 102 |
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java | 126 |
```java
);
while (rownumiter.hasNext()) {
  origrownum = ((Integer) rownumiter.next()).intValue();
  newrownum = this.plot_tab.addRow();
  this.plot_tab.set(newrownum, "state_name",
      orig_tab.getString(origrownum, "state_name"));
  this.plot_tab.set(newrownum, "ycoord", orig_tab.getInt(origrownum, "seqno"));
  this.plot_tab.set(newrownum, "hostname",
      orig_tab.getString(origrownum, "hostname"));
  this.plot_tab.set(newrownum, "friendly_id",
      orig_tab.getString(origrownum, "friendly_id"));
  this.plot_tab.set(newrownum, START_FIELD_NAME,
      orig_tab.getDouble(origrownum, START_FIELD_NAME));
  this.plot_tab.set(newrownum, END_FIELD_NAME,
      orig_tab.getDouble(origrownum, END_FIELD_NAME));
}
```
File | Line |
---|---|
org/apache/hadoop/chukwa/ChukwaArchiveKey.java | 311 |
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java | 328 |
```java
{
  int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
  int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
  int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
  int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
  s1 += z1;
  s2 += z2;
  l1 -= z1;
  l2 -= z2;
  int r1 = org.apache.hadoop.record.Utils.compareBytes(b1, s1, i1, b2, s2, i2);
  if (r1 != 0) {
    return (r1 < 0) ? -1 : 0;
  }
  s1 += i1;
  s2 += i2;
  l1 -= i1;
  l1 -= i2;
}
{
  int i = org.apache.hadoop.record.Utils.readVInt(b1, s1);
```
File | Line |
---|---|
org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java | 647 |
org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java | 670 |
} else if (this.query_stat_type.equals("avg_volume")) { for(int i=0;i<events.size();i++) { HashMap<String, Object> event = events.get(i); start=(Long)event.get("start_time"); end=(Long)event.get("finish_time"); start_millis = Integer.parseInt(((String)event.get("start_time_millis"))); end_millis = Integer.parseInt(((String)event.get("finish_time_millis"))); String this_host = (String) event.get("hostname"); String other_host = (String) event.get("other_host"); int this_host_idx = host_indices.get(this_host).intValue(); int other_host_idx = host_indices.get(other_host).intValue(); long curr_val = Long.parseLong((String)event.get("bytes")); |
File | Line |
---|---|
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java | 232 |
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java | 251 |
```java
{
  int i1 = org.apache.hadoop.record.Utils.readVInt(b1, s1);
  int i2 = org.apache.hadoop.record.Utils.readVInt(b2, s2);
  int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
  int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
  s1 += z1;
  s2 += z2;
  l1 -= z1;
  l2 -= z2;
  int r1 = org.apache.hadoop.record.Utils.compareBytes(b1, s1, i1, b2, s2, i2);
  if (r1 != 0) {
    return (r1 < 0) ? -1 : 0;
  }
  s1 += i1;
  s2 += i2;
  l1 -= i1;
  l1 -= i2;
}
```
File | Line |
---|---|
org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java | 632 |
org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java | 647 |
} else if (this.query_stat_type.equals("total_duration")) { for(int i=0;i<events.size();i++) { HashMap<String, Object> event = events.get(i); start=(Long)event.get("start_time"); end=(Long)event.get("finish_time"); start_millis = Integer.parseInt(((String)event.get("start_time_millis"))); end_millis = Integer.parseInt(((String)event.get("finish_time_millis"))); String this_host = (String) event.get("hostname"); String other_host = (String) event.get("other_host"); int this_host_idx = host_indices.get(this_host).intValue(); int other_host_idx = host_indices.get(other_host).intValue(); |
File | Line |
---|---|
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java | 125 |
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java | 178 |
```java
    (Predicate) ExpressionParser.parse(
        "[state_name] == 'map' OR [state_name] == 'reduce'"));
while (rownumiter.hasNext()) {
  origrownum = ((Integer) rownumiter.next()).intValue();
  newrownum = this.plot_tab.addRow();
  this.plot_tab.set(newrownum, "state_name",
      orig_tab.getString(origrownum, "state_name"));
  this.plot_tab.set(newrownum, "ycoord", orig_tab.getInt(origrownum, "seqno"));
  this.plot_tab.set(newrownum, "hostname",
      orig_tab.getString(origrownum, "hostname"));
  this.plot_tab.set(newrownum, "friendly_id",
      orig_tab.getString(origrownum, "friendly_id"));
  this.plot_tab.set(newrownum, START_FIELD_NAME,
      orig_tab.getDouble(origrownum, START_FIELD_NAME));
  this.plot_tab.set(newrownum, START_FIELD_NAME,
      orig_tab.getDouble(origrownum, END_FIELD_NAME));
```
File | Line |
---|---|
org/apache/hadoop/chukwa/analysis/salsa/fsm/DataNodeClientTraceMapper.java | 150 |
org/apache/hadoop/chukwa/analysis/salsa/fsm/TaskTrackerClientTraceMapper.java | 151 |
```java
start_rec.time_orig_epoch = k[0];
start_rec.time_orig = (new Long(actual_time_ms)).toString(); // not actually used
start_rec.timestamp = (new Long(actual_time_ms)).toString();
start_rec.time_end = new String("");
start_rec.time_start = new String(start_rec.timestamp);

end_rec.time_orig_epoch = k[0];
end_rec.time_orig = val.getValue("actual_time");
end_rec.timestamp = new String(val.getValue("actual_time"));
end_rec.time_end = new String(val.getValue("actual_time"));
end_rec.time_start = new String("");

log.debug("Duration: " + (Long.parseLong(end_rec.time_end) -
    Long.parseLong(start_rec.time_start)));
```
File | Line |
---|---|
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java | 102 |
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java | 179 |
```java
);
while (rownumiter.hasNext()) {
  origrownum = ((Integer) rownumiter.next()).intValue();
  newrownum = this.plot_tab.addRow();
  this.plot_tab.set(newrownum, "state_name",
      orig_tab.getString(origrownum, "state_name"));
  this.plot_tab.set(newrownum, "ycoord", orig_tab.getInt(origrownum, "seqno"));
  this.plot_tab.set(newrownum, "hostname",
      orig_tab.getString(origrownum, "hostname"));
  this.plot_tab.set(newrownum, "friendly_id",
      orig_tab.getString(origrownum, "friendly_id"));
  this.plot_tab.set(newrownum, START_FIELD_NAME,
      orig_tab.getDouble(origrownum, START_FIELD_NAME));
  this.plot_tab.set(newrownum, START_FIELD_NAME,
      orig_tab.getDouble(origrownum, END_FIELD_NAME));
```
File | Line |
---|---|
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobLogHistoryProcessor.java | 337 |
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/Log4jJobHistoryProcessor.java | 316 |
record.add("HodId", keys.get("HODID")); } // log.info("MRJobCounters +1"); output.collect(key, record); } if (keys.containsKey("TASK_TYPE") && keys.containsKey("COUNTERS") && (keys.get("TASK_TYPE").equalsIgnoreCase("REDUCE") || keys.get( "TASK_TYPE").equalsIgnoreCase("MAP"))) { // MAP // Task TASKID="tip_200804210403_0005_m_000154" TASK_TYPE="MAP" // TASK_STATUS="SUCCESS" FINISH_TIME="1208760463883" // COUNTERS="File Systems.Local bytes read:159265655,File // Systems.Local bytes written:318531310, // File Systems.HDFS bytes read:145882417,Map-Reduce // Framework.Map input records:1706604, // Map-Reduce Framework.Map output records:1706604,Map-Reduce // Framework.Map input bytes:145882057, // Map-Reduce Framework.Map output bytes:142763253,Map-Reduce // Framework.Combine input records:0,Map-Reduce // Framework.Combine output records:0" // REDUCE // Task TASKID="tip_200804210403_0005_r_000524" // TASK_TYPE="REDUCE" TASK_STATUS="SUCCESS" // FINISH_TIME="1208760877072" // COUNTERS="File Systems.Local bytes read:1179319677,File // Systems.Local bytes written:1184474889,File Systems.HDFS // bytes written:59021, // Map-Reduce Framework.Reduce input groups:684,Map-Reduce // Framework.Reduce input records:1000,Map-Reduce // Framework.Reduce output records:1000" record = new ChukwaRecord(); key = new ChukwaRecordKey(); buildGenericRecord(record, null, Long .parseLong(keys.get("FINISH_TIME")), "SizeVsFinishTime"); extractCounters(record, keys.get("COUNTERS")); record.add("JOBID", keys.get("JOBID")); record.add("TASKID", keys.get("TASKID")); record.add("TASK_TYPE", keys.get("TASK_TYPE")); |
File | Line |
---|---|
org/apache/hadoop/chukwa/hicc/rest/MetricsController.java | 148 |
org/apache/hadoop/chukwa/hicc/rest/MetricsController.java | 179 |
```java
public String getRowNames(@Context HttpServletRequest request,
    @PathParam("table") String tableName,
    @PathParam("family") String family,
    @PathParam("column") String column,
    @QueryParam("start") String start,
    @QueryParam("end") String end,
    @QueryParam("fullScan") @DefaultValue("false") boolean fullScan) {
  SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
  long startTime = 0;
  long endTime = 0;
  TimeHandler time = new TimeHandler(request);
  try {
    if (start != null) {
      startTime = sdf.parse(start).getTime();
    } else {
      startTime = time.getStartTime();
    }
    if (end != null) {
      endTime = sdf.parse(end).getTime();
    } else {
      endTime = time.getEndTime();
    }
  } catch (ParseException e) {
    throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
        .entity("Start/End date parse error. Format: yyyyMMddHHmmss.").build());
  }
  Set<String> columnNames = ChukwaHBaseStore.getRowNames(tableName, family,
      column, startTime, endTime, fullScan);
```
File | Line |
---|---|
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/HadoopMetricsProcessor.java | 74 |
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobSummary.java | 48 |
```java
public JobSummary() {
  // TODO move that to config
  sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
}

@SuppressWarnings("unchecked")
@Override
protected void parse(String recordEntry,
    OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter)
    throws Throwable {
  try {
    // Look for syslog PRI, if PRI is not found, start from offset of 0.
    int idx = recordEntry.indexOf('>', 0);
    String dStr = recordEntry.substring(idx + 1, idx + 23);
    int start = idx + 25;
    idx = recordEntry.indexOf(' ', start);
    // String level = recordEntry.substring(start, idx);
    start = idx + 1;
    idx = recordEntry.indexOf(' ', start);
    // String className = recordEntry.substring(start, idx-1);
    String body = recordEntry.substring(idx + 1);
    body = body.replaceAll("\n", "");
    // log.info("record [" + recordEntry + "] body [" + body +"]");
    Date d = sdf.parse(dStr);
```
File | Line |
---|---|
org/apache/hadoop/chukwa/database/DatabaseConfig.java | 56 |
org/apache/hadoop/chukwa/inputtools/mdl/DataConfig.java | 61 |
```java
  if (System.getenv("CHUKWA_CONF_DIR") != null) {
    // Allow site-specific MDL files to be included in the
    // configuration so as to keep the "main" mdl.xml pure.
    File confDir = new File(System.getenv("CHUKWA_CONF_DIR"));
    File[] confFiles = confDir.listFiles(new FilenameFilter() {
      @Override
      public boolean accept(File dir, String name) {
        // Implements a naming convention of ending with "mdl.xml"
        // but is careful not to pick up mdl.xml itself again.
        return name.endsWith(MDL_XML) && !name.equals(MDL_XML);
      }
    });
    if (confFiles != null) {
      for (File confFile : confFiles)
        config.addResource(new Path(confFile.getAbsolutePath()));
    }
  }
}

public String get(String key) {
  return config.get(key);
}

public void put(String key, String value) {
```
File | Line |
---|---|
org/apache/hadoop/chukwa/analysis/salsa/fsm/DataNodeClientTraceMapper.java | 98 |
org/apache/hadoop/chukwa/analysis/salsa/fsm/TaskTrackerClientTraceMapper.java | 97 |
```java
end_rec.fsm_type = new FSMType(FSMType.MAPREDUCE_FSM);
end_rec.state_type = new StateType(StateType.STATE_END);

/* extract addresses */
Matcher src_regex = ipPattern.matcher(val.getValue("src"));
if (src_regex.matches()) {
  src_add = src_regex.group(1);
} else {
  log.warn("Failed to match src IP:" + val.getValue("src") + "");
  src_add = new String("");
}
Matcher dest_regex = ipPattern.matcher(val.getValue("dest"));
if (dest_regex.matches()) {
  dest_add = dest_regex.group(1);
} else {
  log.warn("Failed to match dest IP:" + val.getValue("dest") + "");
  dest_add = new String("");
}
```
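Both ClientTrace mappers repeat this match-or-warn pattern for the `src` and `dest` fields. A minimal sketch of a shared helper, with a hypothetical `AddressExtractor` name and the regex copied from the mappers:

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Hypothetical helper, not part of Chukwa: factors out the duplicated
// match-or-warn logic used for both the "src" and "dest" fields in the
// two ClientTrace mappers.
final class AddressExtractor {
  private static final Pattern IP_PATTERN = Pattern.compile(
      "([0-9]+\\.[0-9]+\\.[0-9]+\\.[0-9]+)[a-zA-Z\\-_:\\/].*");

  /** Returns the leading IPv4 address in {@code value}, or "" on no match. */
  static String extractAddress(String value) {
    Matcher m = IP_PATTERN.matcher(value);
    if (m.matches()) {
      return m.group(1);
    }
    // Returning "" on failure mirrors the original code; the caller can
    // log the miss where the field name ("src" or "dest") is known.
    return "";
  }
}
```

Each mapper would then reduce to two calls such as `src_add = AddressExtractor.extractAddress(val.getValue("src"))`, keeping the warning log at the call site.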
File | Line |
---|---|
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java | 104 |
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java | 153 |
```java
String curr_reduce = orig_tab.getString(origrownum, "friendly_id");
newrownum = this.plot_tab.addRow();
this.plot_tab.set(newrownum, "state_name",
    orig_tab.getString(origrownum, "state_name"));
this.plot_tab.set(newrownum, "ycoord", orig_tab.getInt(origrownum, "seqno"));
this.plot_tab.set(newrownum, "hostname",
    orig_tab.getString(origrownum, "hostname"));
this.plot_tab.set(newrownum, "friendly_id",
    orig_tab.getString(origrownum, "friendly_id"));
this.plot_tab.set(newrownum, START_FIELD_NAME,
    orig_tab.getDouble(origrownum, START_FIELD_NAME));
this.plot_tab.set(newrownum, END_FIELD_NAME,
    orig_tab.getDouble(origrownum, END_FIELD_NAME));
```
File | Line |
---|---|
org/apache/hadoop/chukwa/analysis/salsa/visualization/Heatmap.java | 229 |
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java | 538 |
```java
  this.jobname = new String(s);
}

/**
 * Set dimensions of image to be generated
 * Call before calling @see #run
 */
public void setDimensions(int width, int height) {
  this.SIZE_X = width;
  this.SIZE_Y = height;
}

/**
 * Specify whether to print legend of states
 * Advisable to not print legend for excessively small images since
 * legend has fixed point size
 * Call before calling @see #run
 */
public void setLegend(boolean legendopt) {
  if (legendopt) {
    this.plot_legend = true;
  } else {
    this.plot_legend = false;
  }
}

/**
 * Generates image in specified format, and writes image as binary
 * output to supplied output stream
 */
public boolean getImage(java.io.OutputStream output, String img_fmt,
    double scale) {
  dis = new Display(this.viz);
  dis.setSize(SIZE_X, SIZE_Y);
  dis.setHighQuality(true);
  dis.setFont(new Font(Font.SANS_SERIF, Font.PLAIN, 24));
  return dis.saveImage(output, img_fmt, scale);
}

/**
 * Adds a column to given table by converting timestamp to long with
 * seconds since epoch, and adding milliseconds from additional column
 * in original table
 *
 * @param origTable Table to add to
 * @param srcFieldName Name of column containing timestamp
 * @param srcMillisecondFieldName Name of column containing millisecond value of time
 * @param dstFieldName Name of new column to add
 *
 * @return Modified table with added column
 */
protected Table addTimeCol
```
File | Line |
---|---|
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobLogHistoryProcessor.java | 70 |
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/Log4jJobHistoryProcessor.java | 75 |
```java
body = body.substring(firstSep);
internalMatcher.reset(body);
// String fieldName = null;
// String fieldValue = null;
while (internalMatcher.matches()) {
  keys.put(internalMatcher.group(1).trim(), internalMatcher.group(2)
      .trim());
  // TODO Remove debug info before production
  // fieldName = internalMatcher.group(1).trim();
  // fieldValue = internalMatcher.group(2).trim();
  // log.info("JobLogHistoryProcessor Add field: [" + fieldName +
  // "][" + fieldValue +"]" );
  // log.info("EOL : [" + internalMatcher.group(3) + "]" );
  internalMatcher.reset(internalMatcher.group(3));
}

if (!keys.containsKey("JOBID")) {
  // Extract JobID from taskID
  // JOBID = "job_200804210403_0005"
  // TASKID = "tip_200804210403_0005_m_000018"
  String jobId = keys.get("TASKID");
  int idx1 = jobId.indexOf('_', 0);
  int idx2 = jobId.indexOf('_', idx1 + 1);
  idx2 = jobId.indexOf('_', idx2 + 1);
  keys.put("JOBID", "job" + jobId.substring(idx1, idx2));
```
File | Line |
---|---|
org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java | 147 |
org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java | 187 |
```java
scan.addColumn(family.getBytes(), qualifier.getBytes());
if (!fullScan) {
  // Take sample columns of the recent time.
  StringBuilder temp = new StringBuilder();
  temp.append(endTime - 300000L);
  scan.setStartRow(temp.toString().getBytes());
  temp.setLength(0);
  temp.append(endTime);
  scan.setStopRow(temp.toString().getBytes());
} else {
  StringBuilder temp = new StringBuilder();
  temp.append(startTime);
  scan.setStartRow(temp.toString().getBytes());
  temp.setLength(0);
  temp.append(endTime);
  scan.setStopRow(temp.toString().getBytes());
}
```
File | Line |
---|---|
org/apache/hadoop/chukwa/ChukwaArchiveKey.java | 169 |
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java | 136 |
```java
      key = _rio_a.readString("key");
    } else {
      java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo> typeInfos = (java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo>) (_rio_rtiFilter
          .getFieldTypeInfos());
      org.apache.hadoop.record.meta.Utils.skip(_rio_a, typeInfos.get(_rio_i)
          .getFieldID(), typeInfos.get(_rio_i).getTypeID());
    }
  }
  _rio_a.endRecord(_rio_tag);
}

public int compareTo(final Object _rio_peer_) throws ClassCastException {
  if (!(_rio_peer_ instanceof ChukwaRecordKey)) {
```
File | Line |
---|---|
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java | 180 |
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java | 137 |
```java
    } else {
      java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo> typeInfos = (java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo>) (_rio_rtiFilter
          .getFieldTypeInfos());
      org.apache.hadoop.record.meta.Utils.skip(_rio_a, typeInfos.get(_rio_i)
          .getFieldID(), typeInfos.get(_rio_i).getTypeID());
    }
  }
  _rio_a.endRecord(_rio_tag);
}

public int compareTo(final Object _rio_peer_) throws ClassCastException {
  if (!(_rio_peer_ instanceof ChukwaRecordKey)) {
```
File | Line |
---|---|
org/apache/hadoop/chukwa/database/DatabaseConfig.java | 163 |
org/apache/hadoop/chukwa/database/DatabaseConfig.java | 252 |
```java
      partitionSize = CENTURY;
    }
    currentPartition = now / partitionSize;
    startPartition = start / partitionSize;
    endPartition = end / partitionSize;
  } else {
    fallback = false;
  }
}

if (startPartition != endPartition) {
  int delta = (int) (endPartition - startPartition);
  tableNames = new String[delta + 1];
  for (int i = 0; i <= delta; i++) {
    long partition = startPartition + (long) i;
    tableNames[i] = tableName + "_" + partition + tableType;
  }
} else {
  tableNames = new String[1];
  tableNames[0] = tableName + "_" + startPartition + tableType;
}
return tableNames;
}

public static void main(String[] args) {
```
File | Line |
---|---|
org/apache/hadoop/chukwa/ChukwaArchiveKey.java | 170 |
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java | 180 |
```java
    } else {
      java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo> typeInfos = (java.util.ArrayList<org.apache.hadoop.record.meta.FieldTypeInfo>) (_rio_rtiFilter
          .getFieldTypeInfos());
      org.apache.hadoop.record.meta.Utils.skip(_rio_a, typeInfos.get(_rio_i)
          .getFieldID(), typeInfos.get(_rio_i).getTypeID());
    }
  }
  _rio_a.endRecord(_rio_tag);
}

public int compareTo(final Object _rio_peer_) throws ClassCastException {
  if (!(_rio_peer_ instanceof ChukwaRecordJT)) {
```
File | Line |
---|---|
org/apache/hadoop/chukwa/hicc/DatasetMapper.java | 185 |
org/apache/hadoop/chukwa/util/DatabaseWriter.java | 148 |
```java
public void close() {
  // it is a good idea to release
  // resources in a finally{} block
  // in reverse-order of their creation
  // if they are no-longer needed
  if (rs != null) {
    try {
      rs.close();
    } catch (SQLException sqlEx) {
      // ignore
      log.debug(ExceptionUtil.getStackTrace(sqlEx));
    }
    rs = null;
  }
  if (stmt != null) {
    try {
      stmt.close();
    } catch (SQLException sqlEx) {
      // ignore
      log.debug(ExceptionUtil.getStackTrace(sqlEx));
    }
    stmt = null;
  }
  if (conn != null) {
    try {
      conn.close();
    } catch (SQLException sqlEx) {
      // ignore
      log.debug(ExceptionUtil.getStackTrace(sqlEx));
    }
    conn = null;
  }
}
```
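This hand-rolled `close()` predates Java 7. On a modern JDK, try-with-resources gives the same release-in-reverse-order behavior without any of the duplicated null checks; a minimal sketch, assuming the usual JDBC `Connection`/`Statement`/`ResultSet` trio:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

class QuerySketch {
  // try-with-resources closes rs, then stmt, then conn -- the same
  // reverse-order cleanup the duplicated close() methods do by hand --
  // and the cleanup still runs when the body throws.
  static void runQuery(String url, String sql) throws SQLException {
    try (Connection conn = DriverManager.getConnection(url);
         Statement stmt = conn.createStatement();
         ResultSet rs = stmt.executeQuery(sql)) {
      while (rs.next()) {
        System.out.println(rs.getString(1));
      }
    } // SQLExceptions from close() propagate or are recorded as suppressed.
  }
}
```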
File | Line |
---|---|
org/apache/hadoop/chukwa/rest/resource/ViewContextResolver.java | 49 |
org/apache/hadoop/chukwa/rest/resource/WidgetContextResolver.java | 46 |
```java
public WidgetContextResolver() throws Exception {
  Map props = new HashMap<String, Object>();
  props.put(JSONJAXBContext.JSON_NOTATION, JSONJAXBContext.JSONNotation.MAPPED);
  props.put(JSONJAXBContext.JSON_ROOT_UNWRAPPING, Boolean.TRUE);
  props.put(JSONJAXBContext.JSON_ARRAYS, jsonArray);
  this.types = new HashSet<Class<?>>(Arrays.asList(classTypes));
  this.context = new JSONJAXBContext(classTypes, props);
}

public JAXBContext getContext(Class<?> objectType) {
  return (types.contains(objectType)) ? context : null;
}
}
```
File | Line |
---|---|
org/apache/hadoop/chukwa/analysis/salsa/fsm/DataNodeClientTraceMapper.java | 50 |
org/apache/hadoop/chukwa/analysis/salsa/fsm/TaskTrackerClientTraceMapper.java | 51 |
```java
    Pattern.compile("([0-9]+\\.[0-9]+\\.[0-9]+\\.[0-9]+)[a-zA-Z\\-_:\\/].*");

public void map(ChukwaRecordKey key, ChukwaRecord val,
    OutputCollector<ChukwaRecordKey, FSMIntermedEntry> output,
    Reporter reporter) throws IOException {
  /* Extract field names for checking */
  String[] fieldNames = val.getFields();
  ArrayList<String> fieldNamesList = new ArrayList<String>(fieldNames.length);
  for (int i = 0; i < fieldNames.length; i++) {
    fieldNamesList.add(fieldNames[i]);
  }

  // Handle ClientTraceDetailed and DataNodeLog entries separately
  // because we need to combine both types of entries for a complete picture
  if (key.getReduceType().equals("ClientTraceDetailed")) {
    assert (fieldNamesList.contains("op"));
    if (val.getValue("op").startsWith("MAPRED")) {
```
File | Line |
---|---|
org/apache/hadoop/chukwa/ChukwaArchiveKey.java | 298 |
org/apache/hadoop/chukwa/ChukwaArchiveKey.java | 349 |
```java
{
  long i1 = org.apache.hadoop.record.Utils.readVLong(b1, s1);
  long i2 = org.apache.hadoop.record.Utils.readVLong(b2, s2);
  if (i1 != i2) {
    return ((i1 - i2) < 0) ? -1 : 0;
  }
  int z1 = org.apache.hadoop.record.Utils.getVIntSize(i1);
  int z2 = org.apache.hadoop.record.Utils.getVIntSize(i2);
  s1 += z1;
  s2 += z2;
  l1 -= z1;
  l2 -= z2;
}
```
File | Line |
---|---|
org/apache/hadoop/chukwa/ChukwaArchiveKey.java | 361 |
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java | 363 |
```java
      }
      return (os1 - s1);
    } catch (java.io.IOException e) {
      throw new RuntimeException(e);
    }
  }

  public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
    int ret = compareRaw(b1, s1, l1, b2, s2, l2);
    return (ret == -1) ? -1 : ((ret == 0) ? 1 : 0);
  }
}

static {
  org.apache.hadoop.record.RecordComparator.define(ChukwaRecordKey.class,
```
File | Line |
---|---|
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobLogHistoryProcessor.java | 156 |
org/apache/hadoop/chukwa/extraction/demux/processor/mapper/JobLogHistoryProcessor.java | 198 |
key.setKey("JobLogHist/SHUFFLE/" + keys.get("JOBID") + "/" + keys.get("START_TIME")); key.setReduceType("JobLogHistoryReduceProcessor"); record = new ChukwaRecord(); record.setTime(Long.parseLong(keys.get("START_TIME"))); record.add("JOBID", keys.get("JOBID")); record.add("START_TIME", keys.get("START_TIME")); record.add(Record.tagsField, chunk.getTags()); // log.info("JobLogHist/SHUFFLE/S"); output.collect(key, record); } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("ReduceAttempt") |
File | Line |
---|---|
org/apache/hadoop/chukwa/ChukwaArchiveKey.java | 270 |
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordKey.java | 210 |
```java
{
  int i = org.apache.hadoop.record.Utils.readVInt(b, s);
  int z = org.apache.hadoop.record.Utils.getVIntSize(i);
  s += (z + i);
  l -= (z + i);
}
{
  int i = org.apache.hadoop.record.Utils.readVInt(b, s);
  int z = org.apache.hadoop.record.Utils.getVIntSize(i);
  s += (z + i);
  l -= (z + i);
}
```
File | Line |
---|---|
org/apache/hadoop/chukwa/hicc/ClusterConfig.java | 30 |
org/apache/hadoop/chukwa/util/ClusterConfig.java | 29 |
```java
static public String getContents(File aFile) {
  // ...checks on aFile are elided
  StringBuffer contents = new StringBuffer();
  try {
    // use buffering, reading one line at a time
    // FileReader always assumes default encoding is OK!
    BufferedReader input = new BufferedReader(new FileReader(aFile));
    try {
      String line = null; // not declared within while loop
      /*
       * readLine is a bit quirky : it returns the content of a line MINUS the
       * newline. it returns null only for the END of the stream. it returns
       * an empty String if two newlines appear in a row.
       */
      while ((line = input.readLine()) != null) {
        contents.append(line);
        contents.append(System.getProperty("line.separator"));
      }
    } finally {
      input.close();
    }
  } catch (IOException ex) {
    ex.printStackTrace();
  }
  return contents.toString();
}

public ClusterConfig() {
```
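Both copies of `getContents` carry the same caveat their own comments admit: `FileReader` always uses the platform default encoding. On Java 7+ the method shrinks to a couple of NIO calls with an explicit charset. A minimal sketch; note that unlike the original it preserves the file's own line separators rather than rewriting them, and lets IO errors propagate instead of printing a stack trace:

```java
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

final class FileContents {
  /** Reads a whole file as UTF-8 text. */
  static String getContents(File aFile) throws IOException {
    return new String(Files.readAllBytes(aFile.toPath()),
        StandardCharsets.UTF_8);
  }
}
```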
File | Line |
---|---|
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java | 250 |
org/apache/hadoop/chukwa/analysis/salsa/visualization/Swimlanes.java | 274 |
```java
if (this.collate_reduces && curr_state.equals("reduce_reducer")) {
  this.plot_tab.setFloat(rownum, "ycoord", (float) counter);
  ArrayList<Tuple> alt = this.reducepart_hash.get(this.plot_tab.getString(
      rownum, "friendly_id"));
  Object[] tarr = alt.toArray();
  for (int i = 0; i < tarr.length; i++)
    ((Tuple) tarr[i]).setFloat("ycoord", (float) counter);
  counter++;
} else if (!curr_state.equals("reduce_sort")
    && !curr_state.equals("reduce_shufflewait")) {
```
File | Line |
---|---|
org/apache/hadoop/chukwa/inputtools/mdl/LoaderServer.java | 74 |
org/apache/hadoop/chukwa/util/PidFile.java | 117 |
```java
  pidFilesb.append(pidDir).append(File.separator).append(name).append(".pid");
  String pidFileName = pidFilesb.toString();

  File pidFile = new File(pidFileName);
  if (!pidFile.exists()) {
    log.error("Delete pid file, No such file or directory: " + pidFileName);
  } else {
    try {
      lock.release();
      pidFileOutput.close();
    } catch (IOException e) {
      log.error("Unable to release file lock: " + pidFileName);
    }
  }
  boolean result = pidFile.delete();
  if (!result) {
    log.error("Delete pid file failed, " + pidFileName);
  }
}
```
File | Line |
---|---|
org/apache/hadoop/chukwa/extraction/demux/DailyChukwaRecordRolling.java | 160 |
org/apache/hadoop/chukwa/extraction/demux/HourlyChukwaRecordRolling.java | 116 |
```java
      new HourlyChukwaRecordRolling(), mergeArgs, deleteRawdata);
  List<RecordMerger> allMerge = new ArrayList<RecordMerger>();
  if (rollInSequence) {
    merge.run();
  } else {
    allMerge.add(merge);
    merge.start();
  }

  // join all Threads
  if (!rollInSequence) {
    while (allMerge.size() > 0) {
      RecordMerger m = allMerge.remove(0);
      try {
        m.join();
      } catch (InterruptedException e) {
      }
    }
  } // End if (!rollInSequence)

  // Delete the processed dataSourceFS
  FileUtil.fullyDelete(fs, dataSourceFS.getPath());
} // End for(FileStatus dataSourceFS : dataSourcesFS)
```
File | Line |
---|---|
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java | 139 |
org/apache/hadoop/chukwa/extraction/engine/ChukwaRecordJT.java | 167 |
```java
{
  org.apache.hadoop.record.Index _rio_midx1 = _rio_a
      .startMap("mapFields");
  mapFields = new java.util.TreeMap<String, org.apache.hadoop.record.Buffer>();
  for (; !_rio_midx1.done(); _rio_midx1.incr()) {
    String _rio_k1;
    _rio_k1 = _rio_a.readString("_rio_k1");
    org.apache.hadoop.record.Buffer _rio_v1;
    _rio_v1 = _rio_a.readBuffer("_rio_v1");
    mapFields.put(_rio_k1, _rio_v1);
  }
  _rio_a.endMap("mapFields");
}
```