Examples of ChukwaRecord
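The snippets below are excerpts from Apache Chukwa demux processors and readers that all follow the same pattern: build a ChukwaRecordKey and a ChukwaRecord, populate the record with string fields, and emit the pair through an OutputCollector. A minimal sketch of that shared pattern, using only calls that appear in the excerpts; the enclosing processor class, its chunk and output references, and the buildGenericRecord() helper are assumed scaffolding:

    // Minimal sketch of the shared pattern; surrounding processor scaffolding assumed.
    ChukwaRecordKey key = new ChukwaRecordKey();
    ChukwaRecord record = new ChukwaRecord();

    record.setTime(timestamp);                      // time of the sample, in milliseconds
    record.add("fieldName", "fieldValue");          // fields are added as string pairs
    record.add(Record.tagsField, chunk.getTags());  // propagate the chunk's tags

    key.setKey("SomeCategory/" + id + "/" + timestamp);
    key.setReduceType("SomeReduceProcessor");

    output.collect(key, record);                    // hand the pair to the demux reducer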


Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

            log.debug("skip line:" + lines[i]);
            i++;
          }
        }
        while (i < lines.length) {
          ChukwaRecord record = null;

          if (lines[i].indexOf("avg-cpu") >= 0
              || lines[i].indexOf("Device") >= 0) {
            headers = parseHeader(lines[i]);
            i++;
          }
          String data[] = parseData(lines[i]);
          if (headers[0].equals("avg-cpu:")) {
            log.debug("Matched CPU-Utilization");
            record = new ChukwaRecord();
            key = new ChukwaRecordKey();
            buildGenericRecord(record, null, d.getTime(), "SystemMetrics");
          } else if (headers[0].equals("Device:")) {
            log.debug("Matched Iostat");
            record = new ChukwaRecord();
            key = new ChukwaRecordKey();
            buildGenericRecord(record, null, d.getTime(), "SystemMetrics");
          } else {
            log.debug("No match:" + headers[0]);
          }
          if (record != null) {
            int j = 0;
            log.debug("Data Length: " + data.length);
            while (j < data.length) {
              log.debug("header:" + headers[j] + " data:" + data[j]);
              if (!headers[j].equals("avg-cpu:")) {
                try {
                  // Filter out overflow values for older linux systems
                  long x=Long.parseLong(data[j]);
                  if(x<100000000000L) {
                    record.add(headers[j],data[j]);
                  }
                } catch(NumberFormatException ex) {
                  record.add(headers[j],data[j]);
                }
              }
              j++;
            }
            record.setTime(d.getTime());
            if (data.length > 3) {
              output.collect(key, record);
            }
          }
          i++;
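The parseHeader and parseData helpers called above are outside this excerpt; for iostat-style output they amount to splitting a line on whitespace. A hypothetical sketch, not the project's actual implementation:

    // Hypothetical helpers; the real Iostat processor may normalize columns differently.
    private String[] parseHeader(String line) {
      return line.trim().split("\\s+");   // e.g. "avg-cpu:  %user  %nice  %system ..."
    }

    private String[] parseData(String line) {
      return line.trim().split("\\s+");   // one value per header column
    }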

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

  protected void parse(String recordEntry,
      OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter)
      throws Throwable
  {
    Log4JMetricsContextChukwaRecord record = new Log4JMetricsContextChukwaRecord(recordEntry);
    ChukwaRecord chukwaRecord = record.getChukwaRecord();
    this.buildGenericRecord(chukwaRecord, null, record.getTimestamp(), record.getRecordType());
    output.collect(key, chukwaRecord);
  }

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

  @Override
  protected void parse(String recordEntry,
      OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter)
      throws Throwable {
    ChukwaRecord record = new ChukwaRecord();
    String[] parts = recordEntry.split("\\s");
    record.add("timestamp", parts[0]);
    record.add(parts[1], parts[2]);
    key.setKey(parts[0]+"/"+parts[1]+"/"+parts[0]);
    long timestamp = Long.parseLong(parts[0]);
    this.buildGenericRecord(record, null, timestamp, reduceType);
    output.collect(key, record);   
  }
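This processor expects each record entry to be three whitespace-separated tokens. A hypothetical input and the result it would produce (note that the key reuses parts[0] twice, exactly as written in the code above):

    // Hypothetical input line: "1208760437531 bytesWritten 42"
    //   record fields : timestamp=1208760437531, bytesWritten=42
    //   key           : "1208760437531/bytesWritten/1208760437531"
    //   reduce type   : the reduceType supplied by the surrounding processor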

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

  }

  public void process(ChukwaRecordKey key, Iterator<ChukwaRecord> values,
                      OutputCollector<ChukwaRecordKey, ChukwaRecord> output,
                      Reporter reporter) {
    ChukwaRecord record = new ChukwaRecord();
    record.add("MockReduceProcessorKey", "MockReduceProcessorValue");

    try {
      output.collect(key, record);
    } catch (IOException e) {
      throw new RuntimeException(e);

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

      log.debug("Top Processor Matches");

      try {
        Date d = sdf.parse(matcher.group(1).trim());

        ChukwaRecord record = new ChukwaRecord();
        String[] lines = recordEntry.split("\n");
        int i = 0;
        if (lines.length < 2) {
          return;
        }
        String summaryString = "";
        while (!lines[i].equals("")) {
          summaryString = summaryString + lines[i] + "\n";
          i++;
        }
        i++;
        record = new ChukwaRecord();
        key = new ChukwaRecordKey();
        parseSummary(record, summaryString);
        this.buildGenericRecord(record, null, d.getTime(), reduceType);
        output.collect(key, record);

        StringBuffer buffer = new StringBuffer();
        // FIXME please validate this
        while (i < lines.length) {
          buffer.append(lines[i] + "\n");
          i++;
        }
        record = new ChukwaRecord();
        key = new ChukwaRecordKey();
        this.buildGenericRecord(record, buffer.toString(), d.getTime(), recordType);
        // Output Top info to database
        output.collect(key, record);
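The Top processor above emits two records per entry: one built by parseSummary (not shown in this excerpt) from the header block, and a second whose body carries the remaining per-process table verbatim. On typical top output the split looks roughly like this (a hypothetical illustration):

    // top - 14:01:22 up 10 days, ...              <- summary block: fed to parseSummary
    // Tasks: 120 total,   1 running, ...             and emitted as the first record
    // Cpu(s):  3.2%us,  1.1%sy, ...
    //                                              <- blank line ends the summary
    //   PID USER   PR  NI  VIRT  RES ... COMMAND   <- everything after the blank line is
    //  4821 hdfs   20   0  1.2g  ...     java         buffered into the second record's body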

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

      long offset = 0;

      // HdfsWriter.HdfsWriterKey key = new HdfsWriter.HdfsWriterKey();
      ChukwaRecordKey key = new ChukwaRecordKey();
      ChukwaRecord record = new ChukwaRecord();

      while (r.next(key, record)) {
        lineCount++;

        System.out.println("NameNodeParser Line ["
            + record.getValue(Record.bodyField) + "]");

        if (record != null) {
          timestamp = record.getTime();
          if (timestamp < t0) {
            System.out.println("Line not in range. Skipping: "
                + record.getValue(Record.bodyField));
            System.out.println("Search for: " + new Date(t0) + " is :"
                + new Date(timestamp));
            continue;
          } else if ((timestamp < t1) && (offset < maxOffset)) // JB (epochTS <
                                                               // maxDate)
          {

            System.out
                .println("In Range: " + record.getValue(Record.bodyField));
            boolean valid = false;

            if ((filter == null || filter.equals(""))) {
              valid = true;
            } else if (isValid(record, filter)) {
              valid = true;
            }

            if (valid) {
              records.add(record);
              record = new ChukwaRecord();
              listSize = records.size();
              if (listSize > maxRows) {
                // Report the record actually evicted, not the freshly allocated
                // (empty) one now referenced by 'record'
                ChukwaRecord removed = records.remove(0);
                System.out.println("==========>>>>>REMOVING: "
                    + removed.getValue(Record.bodyField));
              }
            } else {
              System.out
                  .println("In Range ==================>>>>>>>>> OUT Regex: "
                      + record.getValue(Record.bodyField));
            }

          } else {
            System.out.println("Line out of range. Stopping now: "
                + record.getValue(Record.bodyField));
            break;
          }
        }

      }
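The reader r iterated above is constructed outside this excerpt; Chukwa stores ChukwaRecordKey/ChukwaRecord pairs in Hadoop SequenceFiles, so opening one typically looks like the following sketch (path and configuration handling are assumptions):

    // Sketch: open the sequence file that the excerpt above scans.
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    SequenceFile.Reader r = new SequenceFile.Reader(fs, new Path(fileName), conf);
    ChukwaRecordKey key = new ChukwaRecordKey();
    ChukwaRecord record = new ChukwaRecord();
    while (r.next(key, record)) {
      // ... apply the time-range and filter checks shown in the excerpt ...
    }
    r.close();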

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

    // + chunk.getDataType() + "]");

    try {

      HashMap<String, String> keys = new HashMap<String, String>();
      ChukwaRecord record = null;

      int firstSep = recordEntry.indexOf(" ");
      keys.put("RECORD_TYPE", recordEntry.substring(0, firstSep));
      // log.info("JobLogHistoryProcessor Add field: [RECORD_TYPE]["
      // + keys.get("RECORD_TYPE") + "]");

      String body = recordEntry.substring(firstSep);

      internalMatcher.reset(body);

      // String fieldName = null;
      // String fieldValue = null;

      while (internalMatcher.matches()) {

        keys.put(internalMatcher.group(1).trim(), internalMatcher.group(2)
            .trim());

        // TODO Remove debug info before production
        // fieldName = internalMatcher.group(1).trim();
        // fieldValue = internalMatcher.group(2).trim();
        // log.info("JobLogHistoryProcessor Add field: [" + fieldName +
        // "][" + fieldValue +"]" );
        // log.info("EOL : [" + internalMatcher.group(3) + "]" );
        internalMatcher.reset(internalMatcher.group(3));
      }

      if (!keys.containsKey("JOBID")) {
        // Extract JobID from taskID
        // JOBID = "job_200804210403_0005"
        // TASKID = "tip_200804210403_0005_m_000018"
        String jobId = keys.get("TASKID");
        int idx1 = jobId.indexOf('_', 0);
        int idx2 = jobId.indexOf('_', idx1 + 1);
        idx2 = jobId.indexOf('_', idx2 + 1);
        keys.put("JOBID", jobId.substring(idx1 + 1, idx2));
        // log.info("JobLogHistoryProcessor Add field: [JOBID]["
        // + keys.get("JOBID") + "]");
      } else {
        String jobId = keys.get("JOBID").replace("_", "").substring(3);
        keys.put("JOBID", jobId);
      }
      // if (keys.get("RECORD_TYPE").equalsIgnoreCase("Job") &&
      // keys.containsKey("SUBMIT_TIME"))
      // {
      // // Job JOBID="job_200804210403_0005" JOBNAME="MY_JOB"
      // USER="userxxx"
      // // SUBMIT_TIME="1208760436751"
      // JOBCONF="/mapredsystem/xxx.yyy.com/job_200804210403_0005/job.xml"
      //         
      //         
      // }
      // else if (keys.get("RECORD_TYPE").equalsIgnoreCase("Job") &&
      // keys.containsKey("LAUNCH_TIME"))
      // {
      // // Job JOBID="job_200804210403_0005" LAUNCH_TIME="1208760437110"
      // TOTAL_MAPS="5912" TOTAL_REDUCES="739"
      //         
      // }
      // else if (keys.get("RECORD_TYPE").equalsIgnoreCase("Job") &&
      // keys.containsKey("FINISH_TIME"))
      // {
      // // Job JOBID="job_200804210403_0005" FINISH_TIME="1208760906816"
      // JOB_STATUS="SUCCESS" FINISHED_MAPS="5912" FINISHED_REDUCES="739"
      // FAILED_MAPS="0" FAILED_REDUCES="0"
      // // COUNTERS="File Systems.Local bytes read:1735053407244,File
      // Systems.Local bytes written:2610106384012,File Systems.HDFS bytes
      // read:801605644910,File Systems.HDFS bytes written:44135800,
      // // Job Counters .Launched map tasks:5912,Job Counters .Launched
      // reduce tasks:739,Job Counters .Data-local map tasks:5573,Job
      // Counters .Rack-local map tasks:316,Map-Reduce Framework.
      // // Map input records:9410696067,Map-Reduce Framework.Map output
      // records:9410696067,Map-Reduce Framework.Map input
      // bytes:801599188816,Map-Reduce Framework.Map output
      // bytes:784427968116,
      // // Map-Reduce Framework.Combine input records:0,Map-Reduce
      // Framework.Combine output records:0,Map-Reduce Framework.Reduce
      // input groups:477265,Map-Reduce Framework.Reduce input
      // records:739000,
      // // Map-Reduce Framework.Reduce output records:739000"
      //         
      // }
      // else
      if (keys.get("RECORD_TYPE").equalsIgnoreCase("MapAttempt")
          && keys.containsKey("START_TIME")) {
        // MapAttempt TASK_TYPE="MAP"
        // TASKID="tip_200804210403_0005_m_000018"
        // TASK_ATTEMPT_ID="task_200804210403_0005_m_000018_0"
        // START_TIME="1208760437531"
        // HOSTNAME="tracker_xxx.yyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:53734"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/Map/" + keys.get("JOBID") + "/"
            + keys.get("START_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("START_TIME")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("START_TIME"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/Map/S");
        output.collect(key, record);

      } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("MapAttempt")
          && keys.containsKey("FINISH_TIME")) {
        // MapAttempt TASK_TYPE="MAP"
        // TASKID="tip_200804210403_0005_m_005494"
        // TASK_ATTEMPT_ID="task_200804210403_0005_m_005494_0"
        // TASK_STATUS="SUCCESS"
        // FINISH_TIME="1208760624124"
        // HOSTNAME="tracker_xxxx.yyyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:55491"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/Map/" + keys.get("JOBID") + "/"
            + keys.get("FINISH_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("FINISH_TIME")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("FINISH_TIME", keys.get("FINISH_TIME"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/Map/E");
        output.collect(key, record);
      }

      else if (keys.get("RECORD_TYPE").equalsIgnoreCase("ReduceAttempt")
          && keys.containsKey("START_TIME")) {
        // ReduceAttempt TASK_TYPE="REDUCE"
        // TASKID="tip_200804210403_0005_r_000138"
        // TASK_ATTEMPT_ID="task_200804210403_0005_r_000138_0"
        // START_TIME="1208760454885"
        // HOSTNAME="tracker_xxxx.yyyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:51947"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SHUFFLE/" + keys.get("JOBID") + "/"
            + keys.get("START_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("START_TIME")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("START_TIME"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SHUFFLE/S");
        output.collect(key, record);

      } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("ReduceAttempt")
          && keys.containsKey("FINISH_TIME")) {
        // ReduceAttempt TASK_TYPE="REDUCE"
        // TASKID="tip_200804210403_0005_r_000138"
        // TASK_ATTEMPT_ID="task_200804210403_0005_r_000138_0"
        // TASK_STATUS="SUCCESS" SHUFFLE_FINISHED="1208760787167"
        // SORT_FINISHED="1208760787354" FINISH_TIME="1208760802395"
        // HOSTNAME="tracker__xxxx.yyyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:51947"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SHUFFLE/" + keys.get("JOBID") + "/"
            + keys.get("SHUFFLE_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SHUFFLE_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("SHUFFLE_FINISHED", keys.get("SHUFFLE_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SHUFFLE/E");
        output.collect(key, record);

        // SORT
        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SORT/" + keys.get("JOBID") + "/"
            + keys.get("SHUFFLE_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SHUFFLE_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("SHUFFLE_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SORT/S");
        output.collect(key, record);

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SORT/" + keys.get("JOBID") + "/"
            + keys.get("SORT_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SORT_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("SORT_FINISHED", keys.get("SORT_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SORT/E");
        output.collect(key, record);

        // Reduce
        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/REDUCE/" + keys.get("JOBID") + "/"
            + keys.get("SORT_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SORT_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("SORT_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/REDUCE/S");
        output.collect(key, record);

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/REDUCE/" + keys.get("JOBID") + "/"
            + keys.get("FINISH_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SORT_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("FINISH_TIME", keys.get("SORT_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/REDUCE/E");
        output.collect(key, record);

      } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("Job")) {
        // 1
        // Job JOBID="job_200809062051_0001" JOBNAME="wordcount" USER="xxx"
        // SUBMIT_TIME="1208760906812"
        // JOBCONF=
        // "/user/xxx/mapredsystem/563976.yyy.zzz.com/job_200809062051_0001/job.xml"

        // 2
        // Job JOBID="job_200809062051_0001" LAUNCH_TIME="1208760906816"
        // TOTAL_MAPS="3" TOTAL_REDUCES="7"

        // 3
        // Job JOBID="job_200804210403_0005" FINISH_TIME="1208760906826"
        // JOB_STATUS="SUCCESS" FINISHED_MAPS="5912"
        // FINISHED_REDUCES="739" FAILED_MAPS="0" FAILED_REDUCES="0"
        // COUNTERS="File Systems.Local bytes read:1735053407244,File
        // Systems.Local bytes written:2610106384012,File Systems.HDFS
        // bytes read:801605644910,File Systems.HDFS bytes
        // written:44135800,
        // Job Counters .Launched map tasks:5912,Job Counters .Launched
        // reduce tasks:739,Job Counters .Data-local map tasks:5573,Job
        // Counters .Rack-local map tasks:316,Map-Reduce Framework.
        // Map input records:9410696067,Map-Reduce Framework.Map output
        // records:9410696067,Map-Reduce Framework.Map input
        // bytes:801599188816,Map-Reduce Framework.Map output
        // bytes:784427968116,
        // Map-Reduce Framework.Combine input records:0,Map-Reduce
        // Framework.Combine output records:0,Map-Reduce
        // Framework.Reduce input groups:477265,Map-Reduce
        // Framework.Reduce input records:739000,
        // Map-Reduce Framework.Reduce output records:739000"

        record = new ChukwaRecord();
        key = new ChukwaRecordKey();
        buildGenericRecord(record, null, Long
            .parseLong(keys.get("FINISH_TIME")), "MRJob");
        if (keys.containsKey("COUNTERS")) {
          extractCounters(record, keys.get("COUNTERS"));
        }

        key = new ChukwaRecordKey();
        key.setKey("MRJob/" + keys.get("JOBID"));
        key.setReduceType("MRJobReduceProcessor");

        record = new ChukwaRecord();
        record.add(Record.tagsField, chunk.getTags());
        if (keys.containsKey("SUBMIT_TIME")) {
          record.setTime(Long.parseLong(keys.get("SUBMIT_TIME")));
        } else if (keys.containsKey("LAUNCH_TIME")) {
          record.setTime(Long.parseLong(keys.get("LAUNCH_TIME")));
        } else if (keys.containsKey("FINISH_TIME")) {
          record.setTime(Long.parseLong(keys.get("FINISH_TIME")));
        }

        Iterator<String> it = keys.keySet().iterator();
        while (it.hasNext()) {
          String field = it.next();
          record.add(field, keys.get(field));
        }

        output.collect(key, record);
      }

      if (keys.containsKey("TASK_TYPE")
          && keys.containsKey("COUNTERS")
          && (keys.get("TASK_TYPE").equalsIgnoreCase("REDUCE") || keys.get(
              "TASK_TYPE").equalsIgnoreCase("MAP"))) {
        // MAP
        // Task TASKID="tip_200804210403_0005_m_000154" TASK_TYPE="MAP"
        // TASK_STATUS="SUCCESS" FINISH_TIME="1208760463883"
        // COUNTERS="File Systems.Local bytes read:159265655,File
        // Systems.Local bytes written:318531310,
        // File Systems.HDFS bytes read:145882417,Map-Reduce
        // Framework.Map input records:1706604,
        // Map-Reduce Framework.Map output records:1706604,Map-Reduce
        // Framework.Map input bytes:145882057,
        // Map-Reduce Framework.Map output bytes:142763253,Map-Reduce
        // Framework.Combine input records:0,Map-Reduce
        // Framework.Combine output records:0"

        // REDUCE
        // Task TASKID="tip_200804210403_0005_r_000524"
        // TASK_TYPE="REDUCE" TASK_STATUS="SUCCESS"
        // FINISH_TIME="1208760877072"
        // COUNTERS="File Systems.Local bytes read:1179319677,File
        // Systems.Local bytes written:1184474889,File Systems.HDFS
        // bytes written:59021,
        // Map-Reduce Framework.Reduce input groups:684,Map-Reduce
        // Framework.Reduce input records:1000,Map-Reduce
        // Framework.Reduce output records:1000"

        record = new ChukwaRecord();
        key = new ChukwaRecordKey();
        buildGenericRecord(record, null, Long
            .parseLong(keys.get("FINISH_TIME")), "SizeVsFinishTime");
        extractCounters(record, keys.get("COUNTERS"));
        record.add("JOBID", keys.get("JOBID"));
        record.add("TASKID", keys.get("TASKID"));
        record.add("TASK_TYPE", keys.get("TASK_TYPE"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("MR_Graph +1");
        output.collect(key, record);

      }
    } catch (IOException e) {
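The extractCounters helper is not part of this excerpt. Judging from the COUNTERS strings quoted in the comments ("Group.Counter:value,Group.Counter:value,..."), a simplified sketch could look like this; it is an assumption, not the actual Chukwa implementation, which may normalize counter names differently:

    private void extractCounters(ChukwaRecord record, String counters) {
      // Split a string such as "File Systems.HDFS bytes read:801605644910,Job Counters
      // .Launched map tasks:5912" into one record field per counter.
      for (String counter : counters.split(",")) {
        int sep = counter.lastIndexOf(':');
        if (sep > 0) {
          record.add(counter.substring(0, sep).trim(), counter.substring(sep + 1).trim());
        }
      }
    }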

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

  protected void parse(String recordEntry,
      OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter)
      throws Throwable {
    JSONObject json = (JSONObject) JSONValue.parse(recordEntry);
    long timestamp = ((Long)json.get("timestamp")).longValue();
    ChukwaRecord record = new ChukwaRecord();
    Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
    cal.setTimeInMillis(timestamp);
    cal.set(Calendar.SECOND, 0);
    cal.set(Calendar.MILLISECOND, 0);
    JSONArray cpuList = (JSONArray) json.get("cpu");
    double combined = 0.0;
    double user = 0.0;
    double sys = 0.0;
    double idle = 0.0;
    for(int i = 0; i< cpuList.size(); i++) {
      JSONObject cpu = (JSONObject) cpuList.get(i);
      Iterator<String> keys = cpu.keySet().iterator();
      combined = combined + Double.parseDouble(cpu.get("combined").toString());
      user = user + Double.parseDouble(cpu.get("user").toString());
      sys = sys + Double.parseDouble(cpu.get("sys").toString());
      idle = idle + Double.parseDouble(cpu.get("idle").toString());
      while(keys.hasNext()) {
        String key = keys.next();
        record.add(key + "." + i, cpu.get(key).toString());
      }
    }
    combined = combined / cpuList.size();
    user = user / cpuList.size();
    sys = sys / cpuList.size();
    idle = idle / cpuList.size();
    record.add("combined", Double.toString(combined));
    record.add("user", Double.toString(user));
    record.add("idle", Double.toString(idle));   
    record.add("sys", Double.toString(sys));
    buildGenericRecord(record, null, cal.getTimeInMillis(), "cpu");
    output.collect(key, record);   

    record = new ChukwaRecord();
    record.add("Uptime", json.get("uptime").toString());
    JSONArray loadavg = (JSONArray) json.get("loadavg");
    record.add("LoadAverage.1", loadavg.get(0).toString());
    record.add("LoadAverage.5", loadavg.get(1).toString());
    record.add("LoadAverage.15", loadavg.get(2).toString());
    buildGenericRecord(record, null, cal.getTimeInMillis(), "system");
    output.collect(key, record);   

    record = new ChukwaRecord();
    JSONObject memory = (JSONObject) json.get("memory");
    Iterator<String> memKeys = memory.keySet().iterator();
    while(memKeys.hasNext()) {
      String key = memKeys.next();
      record.add(key, memory.get(key).toString());
    }
    buildGenericRecord(record, null, cal.getTimeInMillis(), "memory");
    output.collect(key, record);   
   
    double rxBytes = 0;
    double rxDropped = 0;
    double rxErrors = 0;
    double rxPackets = 0;
    double txBytes = 0;
    double txCollisions = 0;
    double txErrors = 0;
    double txPackets = 0;
    record = new ChukwaRecord();
    JSONArray netList = (JSONArray) json.get("network");
    for(int i = 0;i < netList.size(); i++) {
      JSONObject netIf = (JSONObject) netList.get(i);
      Iterator<String> keys = netIf.keySet().iterator();
      while(keys.hasNext()) {
        String key = keys.next();
        record.add(key + "." + i, netIf.get(key).toString());
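        // Interface index 0 (often the loopback device) is skipped by the i != 0
        // guard below when accumulating the aggregate Rx/Tx totals.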
        if(i!=0) {
          if(key.equals("RxBytes")) {
            rxBytes = rxBytes + (Long) netIf.get(key);
          } else if(key.equals("RxDropped")) {
            rxDropped = rxDropped + (Long) netIf.get(key);
          } else if(key.equals("RxErrors")) {         
            rxErrors = rxErrors + (Long) netIf.get(key);
          } else if(key.equals("RxPackets")) {
            rxPackets = rxPackets + (Long) netIf.get(key);
          } else if(key.equals("TxBytes")) {
            txBytes = txBytes + (Long) netIf.get(key);
          } else if(key.equals("TxCollisions")) {
            txCollisions = txCollisions + (Long) netIf.get(key);
          } else if(key.equals("TxErrors")) {
            txErrors = txErrors + (Long) netIf.get(key);
          } else if(key.equals("TxPackets")) {
            txPackets = txPackets + (Long) netIf.get(key);
          }
        }
      }
    }
    buildGenericRecord(record, null, cal.getTimeInMillis(), "network");
    record.add("RxBytes", Double.toString(rxBytes));
    record.add("RxDropped", Double.toString(rxDropped));
    record.add("RxErrors", Double.toString(rxErrors));
    record.add("RxPackets", Double.toString(rxPackets));
    record.add("TxBytes", Double.toString(txBytes));
    record.add("TxCollisions", Double.toString(txCollisions));
    record.add("TxErrors", Double.toString(txErrors));
    record.add("TxPackets", Double.toString(txPackets));
    output.collect(key, record);   
   
    double readBytes = 0;
    double reads = 0;
    double writeBytes = 0;
    double writes = 0;
    record = new ChukwaRecord();
    JSONArray diskList = (JSONArray) json.get("disk");
    for(int i = 0;i < diskList.size(); i++) {
      JSONObject disk = (JSONObject) diskList.get(i);
      Iterator<String> keys = disk.keySet().iterator();
      while(keys.hasNext()) {
        String key = keys.next();
        record.add(key + "." + i, disk.get(key).toString());
        if(key.equals("ReadBytes")) {
          readBytes = readBytes + (Long) disk.get("ReadBytes");
        } else if(key.equals("Reads")) {
          reads = reads + (Long) disk.get("Reads");
        } else if(key.equals("WriteBytes")) {
          writeBytes = writeBytes + (Long) disk.get("WriteBytes");
        } else if(key.equals("Writes")) {
          writes = writes + (Long) disk.get("Writes");
        }
      }
    }
    record.add("ReadBytes", Double.toString(readBytes));
    record.add("Reads", Double.toString(reads));
    record.add("WriteBytes", Double.toString(writeBytes));
    record.add("Writes", Double.toString(writes));   
    buildGenericRecord(record, null, cal.getTimeInMillis(), "disk");
    output.collect(key, record);
   
    record = new ChukwaRecord();
    record.add("cluster", chunk.getTag("cluster"));
    buildGenericRecord(record, null, cal.getTimeInMillis(), "tags");
    output.collect(key, record);
  }
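This parser expects the record entry to be a JSON document from a Sigar-style system-metrics adaptor. A trimmed, hypothetical example of the shape it reads; field names are taken from the code above, while the values, array lengths, and memory keys are invented:

    {
      "timestamp": 1208760437531,
      "uptime": 86400,
      "loadavg": [0.12, 0.08, 0.01],
      "cpu": [ { "combined": 0.25, "user": 0.20, "sys": 0.05, "idle": 0.75 } ],
      "memory": { "Total": 8388608, "Used": 4194304 },
      "network": [ { "RxBytes": 1024, "RxDropped": 0, "RxErrors": 0, "RxPackets": 10,
                     "TxBytes": 2048, "TxCollisions": 0, "TxErrors": 0, "TxPackets": 12 } ],
      "disk": [ { "ReadBytes": 4096, "Reads": 2, "WriteBytes": 8192, "Writes": 3 } ]
    }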

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

      String jobID = "";
        matcher = jobPattern.matcher(capp);
        if(matcher.matches()) {
          jobID=matcher.group(2);
        }
        ChukwaRecord record = new ChukwaRecord();
        ChukwaRecord jobConfRecord = new ChukwaRecord();
      DocumentBuilderFactory docBuilderFactory
        = DocumentBuilderFactory.newInstance();
      //ignore all comments inside the xml file
      docBuilderFactory.setIgnoringComments(true);
      try {
          DocumentBuilder builder = docBuilderFactory.newDocumentBuilder();
          Document doc = null;
          String fileName = "test_"+randomNumber.nextInt();
          File tmp = new File(fileName);
          FileOutputStream out = new FileOutputStream(tmp);
          out.write(recordEntry.getBytes());
          out.close();
        doc = builder.parse(fileName);
        Element root = doc.getDocumentElement();
        if (!"configuration".equals(root.getTagName()))
            log.fatal("bad conf file: top-level element not <configuration>");
        NodeList props = root.getChildNodes();
            JSONObject json = new JSONObject();
            String queue = "default";
   
        for (int i = 0; i < props.getLength(); i++) {
            Node propNode = props.item(i);
            if (!(propNode instanceof Element))
                continue;
            Element prop = (Element)propNode;
            if (!"property".equals(prop.getTagName()))
                log.warn("bad conf file: element not <property>");
            NodeList fields = prop.getChildNodes();
            String attr = null;
            String value = null;
            for (int j = 0; j < fields.getLength(); j++) {
                Node fieldNode = fields.item(j);
                if (!(fieldNode instanceof Element))
                    continue;
                Element field = (Element)fieldNode;
                if ("name".equals(field.getTagName()) && field.hasChildNodes())
                    attr = ((Text)field.getFirstChild()).getData().trim();
                if ("value".equals(field.getTagName()) && field.hasChildNodes())
                    value = ((Text)field.getFirstChild()).getData();
            }
           
            // Record the property only when both a name and a value were found
            if (attr != null && value != null) {
                json.put(attr, value);
                if ("mapred.job.queue.name".equals(attr)) {
                    queue = value;
                }
                jobConfRecord.add("job_conf." + attr, value);
            }
        }
        record.add("JOBCONF-JSON", json.toString());
        record.add("mapred.job.queue.name", queue);
        record.add("JOBID", "job_" + jobID);
        buildGenericRecord(record, null, time, jobData);
        calendar.setTimeInMillis(time);
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.SECOND, 0);
        calendar.set(Calendar.MILLISECOND, 0);
        key.setKey("" + calendar.getTimeInMillis() + "/job_" + jobID + "/" + time);
        output.collect(key, record);

        jobConfRecord.add("JOBID", "job_" + jobID);
        buildGenericRecord(jobConfRecord, null, time, jobConfData);
        output.collect(key, jobConfRecord);
           
        tmp.delete();
      } catch(Exception e) {
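The record entry handled above is a standard Hadoop job configuration file. A minimal, hypothetical example of the expected input:

    <?xml version="1.0"?>
    <configuration>
      <property>
        <name>mapred.job.queue.name</name>
        <value>default</value>
      </property>
    </configuration>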

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord

          // Skip to the average lines
          log.debug("skip:" + lines[i]);
          i++;
        }
        while (i < lines.length) {
          ChukwaRecord record = null;
          if (lines[i].equals("")) {
            i++;
            headers = parseHeader(lines[i]);
            i++;
          }
          String data[] = parseData(lines[i]);

          // FIXME please validate this
          if (headers[1].equals("IFACE") && headers[2].equals("rxpck/s")) {
            log.debug("Matched Sar-Network");

            record = new ChukwaRecord();
            key = new ChukwaRecordKey();
            this.buildGenericRecord(record, null, d.getTime(), reduceType);
          } else if (headers[1].equals("IFACE") && headers[2].equals("rxerr/s")) {
            log.debug("Matched Sar-Network");

            record = new ChukwaRecord();
            key = new ChukwaRecordKey();
            this.buildGenericRecord(record, null, d.getTime(), reduceType);
          } else if (headers[1].equals("kbmemfree")) {
            log.debug("Matched Sar-Memory");

            record = new ChukwaRecord();
            key = new ChukwaRecordKey();
            this.buildGenericRecord(record, null, d.getTime(), reduceType);
          } else if (headers[1].equals("totsck")) {
            log.debug("Matched Sar-NetworkSockets");

            record = new ChukwaRecord();
            key = new ChukwaRecordKey();
            this.buildGenericRecord(record, null, d.getTime(), reduceType);
          } else if (headers[1].equals("runq-sz")) {
            log.debug("Matched Sar-LoadAverage");

            record = new ChukwaRecord();
            key = new ChukwaRecordKey();
            this.buildGenericRecord(record, null, d.getTime(), reduceType);
          } else {
            log.debug("No match:" + headers[1] + " " + headers[2]);
          }
          if (record != null) {
            int j = 0;

            log.debug("Data Length: " + data.length);
            while (j < data.length) {
              log.debug("header:" + headers[j] + " data:" + data[j]);
             
                //special case code to work around peculiar versions of Sar
              if(headers[j].equals("rxkB/s")) {
                record.add("rxbyt/s", Double.toString(Double.parseDouble(data[j]) * 1000));
              } else if(headers[j].equals("txkB/s")){
                record.add("txbyt/s", Double.toString(Double.parseDouble(data[j]) * 1000));
              } else if (!headers[j].equals("Average:")) { //common case
                record.add(headers[j], data[j]);
              }
              j++;
            }

            output.collect(key, record);