Examples of ChukwaRecordKey


Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey
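All of the snippets on this page follow the same basic pattern: build a ChukwaRecordKey, set its key string (usually a path-like value embedding a timestamp, data type, and source) and its reduce type, pair it with a ChukwaRecord, and emit both through an OutputCollector. As a minimal, self-contained sketch of that pattern (the timestamp, data type, and host name below are made-up illustration values, not taken from any one snippet):

    import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
    import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
    import org.apache.hadoop.chukwa.extraction.engine.Record;

    public class ChukwaRecordKeySketch {
      public static void main(String[] args) {
        long ts = System.currentTimeMillis();

        // Key string: a path-like identifier, e.g. "<timestamp>/<dataType>/<source>"
        ChukwaRecordKey key = new ChukwaRecordKey();
        key.setKey(ts + "/SystemMetrics/host01.example.com");
        // The reduce type routes the key/record pair to a matching reduce processor
        key.setReduceType("SystemMetrics");

        ChukwaRecord record = new ChukwaRecord();
        record.setTime(ts);
        record.add(Record.sourceField, "host01.example.com");

        System.out.println(key.getKey() + " -> " + key.getReduceType());
      }
    }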

      // Round the timestamp down to the start of the hour
      Calendar calendar = Calendar.getInstance();
      calendar.setTimeInMillis(ts);
      calendar.set(Calendar.MINUTE, 0);
      calendar.set(Calendar.SECOND, 0);
      calendar.set(Calendar.MILLISECOND, 0);
      // Key format: <hourTimestamp>/<dataType>/<source>/<originalTimestamp>
      ChukwaRecordKey key = new ChukwaRecordKey();
      key.setKey(calendar.getTimeInMillis() + "/" + chunk.getDataType()
          + "/" + chunk.getSource() + "/" + ts);
      key.setReduceType(chunk.getDataType() + "InError");

      record.setTime(ts);

      record.add(Record.tagsField, chunk.getTags());
      record.add(Record.sourceField, chunk.getSource());
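Note: this first snippet comes from an error-handling path. The reduce type is the chunk's own data type with an "InError" suffix, presumably so that chunks that failed normal parsing are routed to a dedicated error reducer rather than the usual one.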

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey

      // Data
      ChukwaRecord record = null;

      for (int i = 1; i < lines.length; i++) {
        // Skip the header line (index 0); split each data line on whitespace runs
        values = lines[i].split("[\\s]++");
        key = new ChukwaRecordKey();
        record = new ChukwaRecord();
        this.buildGenericRecord(record, null, d.getTime(), "Df");

        record.add(headerCols[0], values[0]);
        record.add(headerCols[1], values[1]);

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey

        return;
      }
      line.setLogType("JobData");
    }
   
    key = new ChukwaRecordKey();
    ChukwaRecord record = new ChukwaRecord();
    // -1L: timestamp placeholder; no time has been parsed for this record here
    this.buildGenericRecord(record, null, -1L, line.getLogType());
   
    for (Entry<String, String> entry : line.entrySet()) {
      record.add(entry.getKey(), entry.getValue());

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey

      // The HOD id sits between "mapredsystem/" and the next "." in the job conf path
      int idx = jobConf.indexOf("mapredsystem/");
      idx += "mapredsystem/".length(); // 13 characters
      int idx2 = jobConf.indexOf(".", idx);
      data.put("HodId", jobConf.substring(idx, idx2));

      ChukwaRecordKey newKey = new ChukwaRecordKey();
      newKey.setKey("" + initTime);
      newKey.setReduceType("MRJob");

      ChukwaRecord newRecord = new ChukwaRecord();
      newRecord.add(Record.tagsField, record.getValue(Record.tagsField));
      newRecord.setTime(initTime);

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey

          summaryString = summaryString + lines[i] + "\n";
          i++;
        }
        i++;
        record = new ChukwaRecord();
        key = new ChukwaRecordKey();
        parseSummary(record, summaryString);
        this.buildGenericRecord(record, null, d.getTime(), "SystemMetrics");
        output.collect(key, record);

        StringBuffer buffer = new StringBuffer();
        // FIXME please validate this
        while (i < lines.length) {
          buffer.append(lines[i]).append("\n");
          i++;
        }
        record = new ChukwaRecord();
        key = new ChukwaRecordKey();
        this.buildGenericRecord(record, buffer.toString(), d.getTime(), "Top");
        // Output Top info to database
        output.collect(key, record);

        // End of parsing

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey

      OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter)
      throws Throwable {
    LogEntry log = new LogEntry(recordEntry);
    PsOutput ps = new PsOutput(log.getBody());
    for (HashMap<String, String> processInfo : ps.getProcessList()) {
      // One "Ps" record per process in the parsed ps output
      key = new ChukwaRecordKey();
      ChukwaRecord record = new ChukwaRecord();
      this.buildGenericRecord(record, null, log.getDate().getTime(), "Ps");
      for (Entry<String, String> entry : processInfo.entrySet()) {
        record.add(entry.getKey(), entry.getValue());
      }

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey

          }
          String data[] = parseData(lines[i]);
          if (headers[0].equals("avg-cpu:")) {
            log.debug("Matched CPU-Utilization");
            record = new ChukwaRecord();
            key = new ChukwaRecordKey();
            buildGenericRecord(record, null, d.getTime(), "SystemMetrics");
          } else if (headers[0].equals("Device:")) {
            log.debug("Matched Iostat");
            record = new ChukwaRecord();
            key = new ChukwaRecordKey();
            buildGenericRecord(record, null, d.getTime(), "SystemMetrics");
          } else {
            log.debug("No match:" + headers[0]);
          }
          if (record != null) {

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey

        // TASKID="tip_200804210403_0005_m_000018"
        // TASK_ATTEMPT_ID="task_200804210403_0005_m_000018_0"
        // START_TIME="1208760437531"
        // HOSTNAME="tracker_xxx.yyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:53734"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/Map/" + keys.get("JOBID") + "/"
            + keys.get("START_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("START_TIME")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("START_TIME"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/Map/S");
        output.collect(key, record);

      } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("MapAttempt")
          && keys.containsKey("FINISH_TIME")) {
        // MapAttempt TASK_TYPE="MAP"
        // TASKID="tip_200804210403_0005_m_005494"
        // TASK_ATTEMPT_ID="task_200804210403_0005_m_005494_0"
        // TASK_STATUS="SUCCESS"
        // FINISH_TIME="1208760624124"
        // HOSTNAME="tracker_xxxx.yyyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:55491"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/Map/" + keys.get("JOBID") + "/"
            + keys.get("FINISH_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("FINISH_TIME")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("FINISH_TIME", keys.get("FINISH_TIME"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/Map/E");
        output.collect(key, record);
      }

      else if (keys.get("RECORD_TYPE").equalsIgnoreCase("ReduceAttempt")
          && keys.containsKey("START_TIME")) {
        // ReduceAttempt TASK_TYPE="REDUCE"
        // TASKID="tip_200804210403_0005_r_000138"
        // TASK_ATTEMPT_ID="task_200804210403_0005_r_000138_0"
        // START_TIME="1208760454885"
        // HOSTNAME="tracker_xxxx.yyyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:51947"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SHUFFLE/" + keys.get("JOBID") + "/"
            + keys.get("START_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("START_TIME")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("START_TIME"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SHUFFLE/S");
        output.collect(key, record);

      } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("ReduceAttempt")
          && keys.containsKey("FINISH_TIME")) {
        // ReduceAttempt TASK_TYPE="REDUCE"
        // TASKID="tip_200804210403_0005_r_000138"
        // TASK_ATTEMPT_ID="task_200804210403_0005_r_000138_0"
        // TASK_STATUS="SUCCESS" SHUFFLE_FINISHED="1208760787167"
        // SORT_FINISHED="1208760787354" FINISH_TIME="1208760802395"
        // HOSTNAME="tracker__xxxx.yyyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:51947"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SHUFFLE/" + keys.get("JOBID") + "/"
            + keys.get("SHUFFLE_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SHUFFLE_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("SHUFFLE_FINISHED", keys.get("SHUFFLE_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SHUFFLE/E");
        output.collect(key, record);

        // SORT
        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SORT/" + keys.get("JOBID") + "/"
            + keys.get("SHUFFLE_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SHUFFLE_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("SHUFFLE_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SORT/S");
        output.collect(key, record);

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SORT/" + keys.get("JOBID") + "/"
            + keys.get("SORT_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SORT_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("SORT_FINISHED", keys.get("SORT_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SORT/E");
        output.collect(key, record);

        // Reduce
        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/REDUCE/" + keys.get("JOBID") + "/"
            + keys.get("SORT_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SORT_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("SORT_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/REDUCE/S");
        output.collect(key, record);

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/REDUCE/" + keys.get("JOBID") + "/"
            + keys.get("FINISH_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("FINISH_TIME")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("FINISH_TIME", keys.get("FINISH_TIME"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/REDUCE/E");
        output.collect(key, record);
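        // Note the pattern above: each phase (Map, SHUFFLE, SORT, REDUCE) emits
        // one record keyed "JobLogHist/<PHASE>/<JOBID>/<timestamp>" at its start
        // and another at its end, all routed to "JobLogHistoryReduceProcessor".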

      } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("Job")) {
        // 1
        // Job JOBID="job_200809062051_0001" JOBNAME="wordcount" USER="xxx"
        // SUBMIT_TIME="1208760906812"
        // JOBCONF=
        // "/user/xxx/mapredsystem/563976.yyy.zzz.com/job_200809062051_0001/job.xml"

        // 2
        // Job JOBID="job_200809062051_0001" LAUNCH_TIME="1208760906816"
        // TOTAL_MAPS="3" TOTAL_REDUCES="7"

        // 3
        // Job JOBID="job_200804210403_0005" FINISH_TIME="1208760906826"
        // JOB_STATUS="SUCCESS" FINISHED_MAPS="5912"
        // FINISHED_REDUCES="739" FAILED_MAPS="0" FAILED_REDUCES="0"
        // COUNTERS="File Systems.Local bytes read:1735053407244,File
        // Systems.Local bytes written:2610106384012,File Systems.HDFS
        // bytes read:801605644910,File Systems.HDFS bytes
        // written:44135800,
        // Job Counters .Launched map tasks:5912,Job Counters .Launched
        // reduce tasks:739,Job Counters .Data-local map tasks:5573,Job
        // Counters .Rack-local map tasks:316,Map-Reduce Framework.
        // Map input records:9410696067,Map-Reduce Framework.Map output
        // records:9410696067,Map-Reduce Framework.Map input
        // bytes:801599188816,Map-Reduce Framework.Map output
        // bytes:784427968116,
        // Map-Reduce Framework.Combine input records:0,Map-Reduce
        // Framework.Combine output records:0,Map-Reduce
        // Framework.Reduce input groups:477265,Map-Reduce
        // Framework.Reduce input records:739000,
        // Map-Reduce Framework.Reduce output records:739000"

        // Build the MRJob record once, and extract counters into the same record
        // that is eventually collected (not into a throwaway instance)
        record = new ChukwaRecord();
        buildGenericRecord(record, null,
            Long.parseLong(keys.get("FINISH_TIME")), "MRJob");
        if (keys.containsKey("COUNTERS")) {
          extractCounters(record, keys.get("COUNTERS"));
        }

        key = new ChukwaRecordKey();
        key.setKey("MRJob/" + keys.get("JOBID"));
        key.setReduceType("MRJobReduceProcessor");

        record.add(Record.tagsField, chunk.getTags());
        // Prefer the earliest timestamp present on this log line
        if (keys.containsKey("SUBMIT_TIME")) {
          record.setTime(Long.parseLong(keys.get("SUBMIT_TIME")));
        } else if (keys.containsKey("LAUNCH_TIME")) {
          record.setTime(Long.parseLong(keys.get("LAUNCH_TIME")));
        } else if (keys.containsKey("FINISH_TIME")) {
          record.setTime(Long.parseLong(keys.get("FINISH_TIME")));
        }

        for (String field : keys.keySet()) {
          record.add(field, keys.get(field));
        }

        output.collect(key, record);
      }

      if (keys.containsKey("TASK_TYPE")
          && keys.containsKey("COUNTERS")
          && (keys.get("TASK_TYPE").equalsIgnoreCase("REDUCE") || keys.get(
              "TASK_TYPE").equalsIgnoreCase("MAP"))) {
        // MAP
        // Task TASKID="tip_200804210403_0005_m_000154" TASK_TYPE="MAP"
        // TASK_STATUS="SUCCESS" FINISH_TIME="1208760463883"
        // COUNTERS="File Systems.Local bytes read:159265655,File
        // Systems.Local bytes written:318531310,
        // File Systems.HDFS bytes read:145882417,Map-Reduce
        // Framework.Map input records:1706604,
        // Map-Reduce Framework.Map output records:1706604,Map-Reduce
        // Framework.Map input bytes:145882057,
        // Map-Reduce Framework.Map output bytes:142763253,Map-Reduce
        // Framework.Combine input records:0,Map-Reduce
        // Framework.Combine output records:0"

        // REDUCE
        // Task TASKID="tip_200804210403_0005_r_000524"
        // TASK_TYPE="REDUCE" TASK_STATUS="SUCCESS"
        // FINISH_TIME="1208760877072"
        // COUNTERS="File Systems.Local bytes read:1179319677,File
        // Systems.Local bytes written:1184474889,File Systems.HDFS
        // bytes written:59021,
        // Map-Reduce Framework.Reduce input groups:684,Map-Reduce
        // Framework.Reduce input records:1000,Map-Reduce
        // Framework.Reduce output records:1000"

        record = new ChukwaRecord();
        key = new ChukwaRecordKey();
        buildGenericRecord(record, null,
            Long.parseLong(keys.get("FINISH_TIME")), "SizeVsFinishTime");
        extractCounters(record, keys.get("COUNTERS"));
        record.add("JOBID", keys.get("JOBID"));
        record.add("TASKID", keys.get("TASKID"));

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey

      // log.info(">>>>>>>>>>>>>> Openning records [" + inputFile.getName()
      // +"][" + testFile.getName() +"]");
      goldReader = new SequenceFile.Reader(fs, inputFile, conf);
      testReader = new SequenceFile.Reader(fs, testFile, conf);

      ChukwaRecordKey goldKey = new ChukwaRecordKey();
      ChukwaRecord goldRecord = new ChukwaRecord();

      ChukwaRecordKey testKey = new ChukwaRecordKey();
      ChukwaRecord testRecord = new ChukwaRecord();

      // log.info(">>>>>>>>>>>>>> Start reading");
      while (goldReader.next(goldKey, goldRecord)) {
        // Guard against the test file being shorter than the gold file
        if (!testReader.next(testKey, testRecord)) {
          log.info(">>>>>>>>>>>>>> Test file has fewer records than gold file");
          return false;
        }

        if (goldKey.compareTo(testKey) != 0) {
          log.info(">>>>>>>>>>>>>> Not the same Key");
          log.info(">>>>>>>>>>>>>> Record [" + goldKey.getKey() + "] ["
              + goldKey.getReduceType() + "]");
          log.info(">>>>>>>>>>>>>> Record [" + testKey.getKey() + "] ["
              + testKey.getReduceType() + "]");
          return false;
        }

        if (goldRecord.compareTo(testRecord) != 0) {
          log.info(">>>>>>>>>>>>>> Not the same Value");
          log.info(">>>>>>>>>>>>>> Record [" + goldKey.getKey() + "] ["
              + goldKey.getReduceType() + "]");
          log.info(">>>>>>>>>>>>>> Record [" + testKey.getKey() + "] ["
              + testKey.getReduceType() + "]");
          log.info(">>>>>>>>>>>>>> Gold Value [" + goldRecord.toString() + "]");
          log.info(">>>>>>>>>>>>>> Test value [" + testRecord.toString() + "]");

          return false;
        }

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey

    JobConf conf = new JobConf();
    conf.set("chukwa.demux.reducer.default.processor", ",org.apache.hadoop.chukwa.extraction.demux.processor.reducer" +
            ".MockReduceProcessor");
    reducer.configure(conf);

    ChukwaRecordKey key = new ChukwaRecordKey("someReduceType", "someKey");
    ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord> output =
            new ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord>();

    reducer.reduce(key, null, output, Reporter.NULL);
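Judging by the argument names in this last snippet, the two-argument constructor ChukwaRecordKey("someReduceType", "someKey") appears to be shorthand for creating an empty key and then calling setReduceType(...) and setKey(...), which is the pattern the earlier snippets use.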