Examples of ChukwaRecordKey
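
The fragments below are drawn from Apache Chukwa's demux and query code and all follow the same pattern: a ChukwaRecordKey whose reduce type routes the pair to a reduce-side processor and whose key string encodes a path-like identity (usually ending in a timestamp), paired with a ChukwaRecord carrying the event time plus field/value pairs. As a minimal, hedged sketch using only the accessors that appear in these fragments (setKey, setReduceType, setTime, add, getKey):

    import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
    import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;

    public class ChukwaRecordKeyExample {
      public static void main(String[] args) {
        long ts = System.currentTimeMillis();

        // The reduce type selects the reduce-side processor; the key string
        // is a "/"-separated identity ending in a timestamp.
        ChukwaRecordKey key = new ChukwaRecordKey();
        key.setKey("JobLogHist/Map/job_200804210403_0005/" + ts);
        key.setReduceType("JobLogHistoryReduceProcessor");

        // The record carries the event time plus arbitrary field/value pairs.
        ChukwaRecord record = new ChukwaRecord();
        record.setTime(ts);
        record.add("JOBID", "job_200804210403_0005");

        System.out.println(key.getKey() + " -> " + record);
      }
    }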


Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey
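This fragment, apparently from a JSON metrics processor (the record type is "Ywatch"), walks a set of timestamped JSON values and builds one ChukwaRecord per metric sample: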

          while (it.hasNext()) {
            jsonTs = it.next();
            jsonValue = jsonData.getString(jsonTs);

            record = new ChukwaRecord();
            key = new ChukwaRecordKey();
            this.buildGenericRecord(record, null, ts, "Ywatch");
            record.add("poller", poller);
            record.add("host", host);
            record.add("metricName", metricName);
            record.add("value", jsonValue);

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey
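This fragment reads key/record pairs back out of a Hadoop SequenceFile: it seeks to a previously saved position, pages through records whose timestamps fall between t0 and t1, and stops once maxRows records have been collected or the range is exhausted, saving the last key in a continuation token: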

      }
      log.debug("Parser Open [" + fileName + "]");

      long timestamp = 0;
      int listSize = 0;
      ChukwaRecordKey key = new ChukwaRecordKey();
      ChukwaRecord record = new ChukwaRecord();

      r = new SequenceFile.Reader(fs, new Path(fileName), conf);

      log.debug("readData Open2 [" + fileName + "]");
      if ((fileName.equals(res.fileName)) && (res.position != -1)) {
        r.seek(res.position);
      }
      res.fileName = fileName;

      while (r.next(key, record)) {
        if (record != null) {
          res.position = r.getPosition();

          timestamp = record.getTime();
          res.currentTs = timestamp;
          log.debug("\nSearch for startDate: " + new Date(t0) + " is :"
              + new Date(timestamp));

          if (timestamp < t0) {
            // log.debug("Line not in range. Skipping: " +record);
            continue;
          } else if (timestamp < t1) {
            log.debug("In Range: " + record.toString());
            boolean valid = false;

            if (filter == null || filter.equals("")) {
              valid = true;
            } else if (isValid(record, filter)) {
              valid = true;
            }

            if (valid) {
              records.add(record);
              record = new ChukwaRecord();
              listSize = records.size();
              if (listSize >= maxRows) {
                // maxRow so stop here
                // Update token
                token.key = key.getKey();
                token.hasMore = true;
                break;
              }
            } else {
              log.debug("In Range ==================>>>>>>>>> OUT Regex: "
                  + record);
            }
          } else {
            log.debug("Line out of range. Stopping now: " + record);
            // Update Token
            token.key = key.getKey();
            token.hasMore = false;
            break;
          }
        }
      }
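
For context, here is a hedged sketch of the writer side that a reader like the one above consumes. It assumes ChukwaRecordKey and ChukwaRecord are used directly as the SequenceFile key and value classes, which the r.next(key, record) calls above imply; the output path and field names are illustrative only:

    import java.io.IOException;

    import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
    import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.SequenceFile;

    public class ChukwaRecordWriterSketch {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);

        // Illustrative output path; Chukwa's real sinks pick their own layout.
        Path path = new Path("/tmp/chukwa-demo.evt");
        SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, path,
            ChukwaRecordKey.class, ChukwaRecord.class);
        try {
          long ts = System.currentTimeMillis();

          ChukwaRecordKey key = new ChukwaRecordKey();
          key.setKey("SystemMetrics/demo/" + ts);
          key.setReduceType("SystemMetrics");

          ChukwaRecord record = new ChukwaRecord();
          record.setTime(ts);
          record.add("host", "node01.example.com"); // illustrative field
          writer.append(key, record);
        } finally {
          writer.close();
        }
      }
    }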

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey
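This fragment, from an iostat-style system-metrics parser, inspects the first column header of each parsed block to decide whether a CPU-utilization or per-device SystemMetrics record should be built: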

          }
          String data[] = parseData(lines[i]);
          if (headers[0].equals("avg-cpu:")) {
            log.debug("Matched CPU-Utilization");
            record = new ChukwaRecord();
            key = new ChukwaRecordKey();
            buildGenericRecord(record, null, d.getTime(), "SystemMetrics");
          } else if (headers[0].equals("Device:")) {
            log.debug("Matched Iostat");
            record = new ChukwaRecord();
            key = new ChukwaRecordKey();
            buildGenericRecord(record, null, d.getTime(), "SystemMetrics");
          } else {
            log.debug("No match:" + headers[0]);
          }
          if (record != null) {

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey
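This fragment, apparently from a processor for top-style output, parses a summary block into one record and then gathers the remaining lines into a second record destined for the database: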

          summaryString = summaryString + lines[i] + "\n";
          i++;
        }
        i++;
        record = new ChukwaRecord();
        key = new ChukwaRecordKey();
        parseSummary(record, summaryString);
        this.buildGenericRecord(record, null, d.getTime(), reduceType);
        output.collect(key, record);

        StringBuffer buffer = new StringBuffer();
        // FIXME please validate this
        while (i < lines.length) {
          buffer.append(lines[i]).append("\n");
          i++;
        }
        record = new ChukwaRecord();
        key = new ChukwaRecordKey();
        this.buildGenericRecord(record, buffer.toString(), d.getTime(), recordType);
        // Output Top info to database
        output.collect(key, record);

        // End of parsing

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey
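This fragment scans a SequenceFile of ChukwaRecordKey/ChukwaRecord pairs, counting records as it goes: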

      int listSize = 0;

      long offset = 0;

      // HdfsWriter.HdfsWriterKey key = new HdfsWriter.HdfsWriterKey();
      ChukwaRecordKey key = new ChukwaRecordKey();
      ChukwaRecord record = new ChukwaRecord();

      while (r.next(key, record)) {
        lineCount++;

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey
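This fragment is from a job-history processor. For each parsed history line it emits either a phase-specific key (JobLogHist/Map, JobLogHist/SHUFFLE, JobLogHist/SORT, JobLogHist/REDUCE) carrying the matching start and finish timestamps, or a job-level MRJob key carrying the job's counters and attributes: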

        // TASKID="tip_200804210403_0005_m_000018"
        // TASK_ATTEMPT_ID="task_200804210403_0005_m_000018_0"
        // START_TIME="1208760437531"
        // HOSTNAME="tracker_xxx.yyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:53734"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/Map/" + keys.get("JOBID") + "/"
            + keys.get("START_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("START_TIME")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("START_TIME"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/Map/S");
        output.collect(key, record);

      } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("MapAttempt")
          && keys.containsKey("FINISH_TIME")) {
        // MapAttempt TASK_TYPE="MAP"
        // TASKID="tip_200804210403_0005_m_005494"
        // TASK_ATTEMPT_ID="task_200804210403_0005_m_005494_0"
        // TASK_STATUS="SUCCESS"
        // FINISH_TIME="1208760624124"
        // HOSTNAME="tracker_xxxx.yyyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:55491"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/Map/" + keys.get("JOBID") + "/"
            + keys.get("FINISH_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("FINISH_TIME")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("FINISH_TIME", keys.get("FINISH_TIME"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/Map/E");
        output.collect(key, record);
      }

      else if (keys.get("RECORD_TYPE").equalsIgnoreCase("ReduceAttempt")
          && keys.containsKey("START_TIME")) {
        // ReduceAttempt TASK_TYPE="REDUCE"
        // TASKID="tip_200804210403_0005_r_000138"
        // TASK_ATTEMPT_ID="task_200804210403_0005_r_000138_0"
        // START_TIME="1208760454885"
        // HOSTNAME="tracker_xxxx.yyyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:51947"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SHUFFLE/" + keys.get("JOBID") + "/"
            + keys.get("START_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("START_TIME")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("START_TIME"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SHUFFLE/S");
        output.collect(key, record);

      } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("ReduceAttempt")
          && keys.containsKey("FINISH_TIME")) {
        // ReduceAttempt TASK_TYPE="REDUCE"
        // TASKID="tip_200804210403_0005_r_000138"
        // TASK_ATTEMPT_ID="task_200804210403_0005_r_000138_0"
        // TASK_STATUS="SUCCESS" SHUFFLE_FINISHED="1208760787167"
        // SORT_FINISHED="1208760787354" FINISH_TIME="1208760802395"
        // HOSTNAME="tracker__xxxx.yyyy.com:xxx.yyy.com/xxx.xxx.xxx.xxx:51947"

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SHUFFLE/" + keys.get("JOBID") + "/"
            + keys.get("SHUFFLE_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SHUFFLE_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("SHUFFLE_FINISHED", keys.get("SHUFFLE_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SHUFFLE/E");
        output.collect(key, record);

        // SORT
        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SORT/" + keys.get("JOBID") + "/"
            + keys.get("SHUFFLE_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SHUFFLE_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("SHUFFLE_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SORT/S");
        output.collect(key, record);

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/SORT/" + keys.get("JOBID") + "/"
            + keys.get("SORT_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SORT_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("SORT_FINISHED", keys.get("SORT_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/SORT/E");
        output.collect(key, record);

        // Reduce
        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/REDUCE/" + keys.get("JOBID") + "/"
            + keys.get("SORT_FINISHED"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("SORT_FINISHED")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("START_TIME", keys.get("SORT_FINISHED"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/REDUCE/S");
        output.collect(key, record);

        key = new ChukwaRecordKey();
        key.setKey("JobLogHist/REDUCE/" + keys.get("JOBID") + "/"
            + keys.get("FINISH_TIME"));
        key.setReduceType("JobLogHistoryReduceProcessor");
        record = new ChukwaRecord();
        record.setTime(Long.parseLong(keys.get("FINISH_TIME")));
        record.add("JOBID", keys.get("JOBID"));
        record.add("FINISH_TIME", keys.get("FINISH_TIME"));
        record.add(Record.tagsField, chunk.getTags());
        // log.info("JobLogHist/REDUCE/E");
        output.collect(key, record);

      } else if (keys.get("RECORD_TYPE").equalsIgnoreCase("Job")) {
        // 1
        // Job JOBID="job_200809062051_0001" JOBNAME="wordcount" USER="xxx"
        // SUBMIT_TIME="1208760906812"
        // JOBCONF=
        // "/user/xxx/mapredsystem/563976.yyy.zzz.com/job_200809062051_0001/job.xml"

        // 2
        // Job JOBID="job_200809062051_0001" LAUNCH_TIME="1208760906816"
        // TOTAL_MAPS="3" TOTAL_REDUCES="7"

        // 3
        // Job JOBID="job_200804210403_0005" FINISH_TIME="1208760906826"
        // JOB_STATUS="SUCCESS" FINISHED_MAPS="5912"
        // FINISHED_REDUCES="739" FAILED_MAPS="0" FAILED_REDUCES="0"
        // COUNTERS="File Systems.Local bytes read:1735053407244,File
        // Systems.Local bytes written:2610106384012,File Systems.HDFS
        // bytes read:801605644910,File Systems.HDFS bytes
        // written:44135800,
        // Job Counters .Launched map tasks:5912,Job Counters .Launched
        // reduce tasks:739,Job Counters .Data-local map tasks:5573,Job
        // Counters .Rack-local map tasks:316,Map-Reduce Framework.
        // Map input records:9410696067,Map-Reduce Framework.Map output
        // records:9410696067,Map-Reduce Framework.Map input
        // bytes:801599188816,Map-Reduce Framework.Map output
        // bytes:784427968116,
        // Map-Reduce Framework.Combine input records:0,Map-Reduce
        // Framework.Combine output records:0,Map-Reduce
        // Framework.Reduce input groups:477265,Map-Reduce
        // Framework.Reduce input records:739000,
        // Map-Reduce Framework.Reduce output records:739000"

        record = new ChukwaRecord();
        key = new ChukwaRecordKey();
        buildGenericRecord(record, null,
            Long.parseLong(keys.get("FINISH_TIME")), "MRJob");
        if (keys.containsKey("COUNTERS")) {
          extractCounters(record, keys.get("COUNTERS"));
        }

        key = new ChukwaRecordKey();
        key.setKey("MRJob/" + keys.get("JOBID"));
        key.setReduceType("MRJobReduceProcessor");

        record = new ChukwaRecord();
        record.add(Record.tagsField, chunk.getTags());
        if (keys.containsKey("SUBMIT_TIME")) {
          record.setTime(Long.parseLong(keys.get("SUBMIT_TIME")));
        } else if (keys.containsKey("LAUNCH_TIME")) {
          record.setTime(Long.parseLong(keys.get("LAUNCH_TIME")));
        } else if (keys.containsKey("FINISH_TIME")) {
          record.setTime(Long.parseLong(keys.get("FINISH_TIME")));
        }

        for (String field : keys.keySet()) {
          record.add(field, keys.get(field));
        }

        output.collect(key, record);
      }

      if (keys.containsKey("TASK_TYPE")
          && keys.containsKey("COUNTERS")
          && (keys.get("TASK_TYPE").equalsIgnoreCase("REDUCE") || keys.get(
              "TASK_TYPE").equalsIgnoreCase("MAP"))) {
        // MAP
        // Task TASKID="tip_200804210403_0005_m_000154" TASK_TYPE="MAP"
        // TASK_STATUS="SUCCESS" FINISH_TIME="1208760463883"
        // COUNTERS="File Systems.Local bytes read:159265655,File
        // Systems.Local bytes written:318531310,
        // File Systems.HDFS bytes read:145882417,Map-Reduce
        // Framework.Map input records:1706604,
        // Map-Reduce Framework.Map output records:1706604,Map-Reduce
        // Framework.Map input bytes:145882057,
        // Map-Reduce Framework.Map output bytes:142763253,Map-Reduce
        // Framework.Combine input records:0,Map-Reduce
        // Framework.Combine output records:0"

        // REDUCE
        // Task TASKID="tip_200804210403_0005_r_000524"
        // TASK_TYPE="REDUCE" TASK_STATUS="SUCCESS"
        // FINISH_TIME="1208760877072"
        // COUNTERS="File Systems.Local bytes read:1179319677,File
        // Systems.Local bytes written:1184474889,File Systems.HDFS
        // bytes written:59021,
        // Map-Reduce Framework.Reduce input groups:684,Map-Reduce
        // Framework.Reduce input records:1000,Map-Reduce
        // Framework.Reduce output records:1000"

        record = new ChukwaRecord();
        key = new ChukwaRecordKey();
        buildGenericRecord(record, null,
            Long.parseLong(keys.get("FINISH_TIME")), "SizeVsFinishTime");
        extractCounters(record, keys.get("COUNTERS"));
        record.add("JOBID", keys.get("JOBID"));
        record.add("TASKID", keys.get("TASKID"));

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey
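This fragment, from a sar output parser, matches column headers to decide which metric group (network, memory, sockets, or load average) the record belongs to: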

          // FIXME please validate this
          if (headers[1].equals("IFACE") && headers[2].equals("rxpck/s")) {
            log.debug("Matched Sar-Network");

            record = new ChukwaRecord();
            key = new ChukwaRecordKey();
            this.buildGenericRecord(record, null, d.getTime(), reduceType);
          } else if (headers[1].equals("IFACE") && headers[2].equals("rxerr/s")) {
            log.debug("Matched Sar-Network");

            record = new ChukwaRecord();
            key = new ChukwaRecordKey();
            this.buildGenericRecord(record, null, d.getTime(), reduceType);
          } else if (headers[1].equals("kbmemfree")) {
            log.debug("Matched Sar-Memory");

            record = new ChukwaRecord();
            key = new ChukwaRecordKey();
            this.buildGenericRecord(record, null, d.getTime(), reduceType);
          } else if (headers[1].equals("totsck")) {
            log.debug("Matched Sar-NetworkSockets");

            record = new ChukwaRecord();
            key = new ChukwaRecordKey();
            this.buildGenericRecord(record, null, d.getTime(), reduceType);
          } else if (headers[1].equals("runq-sz")) {
            log.debug("Matched Sar-LoadAverage");

            record = new ChukwaRecord();
            key = new ChukwaRecordKey();
            this.buildGenericRecord(record, null, d.getTime(), reduceType);
          } else {
            log.debug("No match:" + headers[1] + " " + headers[2]);
          }
          if (record != null) {

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey
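This fragment builds the key for a chunk that failed processing: the key prefix is the chunk's timestamp truncated to the top of the hour, and the reduce type is the chunk's data type with "InError" appended: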

      Calendar calendar = Calendar.getInstance();
      calendar.setTimeInMillis(ts);
      calendar.set(Calendar.MINUTE, 0);
      calendar.set(Calendar.SECOND, 0);
      calendar.set(Calendar.MILLISECOND, 0);
      ChukwaRecordKey key = new ChukwaRecordKey();
      key.setKey("" + calendar.getTimeInMillis() + "/" + chunk.getDataType()
          + "/" + chunk.getSource() + "/" + ts);
      key.setReduceType(chunk.getDataType() + "InError");

      record.setTime(ts);

      record.add(Record.tagsField, chunk.getTags());
      record.add(Record.sourceField, chunk.getSource());

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey
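This fragment, from a df output parser, splits each data line on whitespace and emits one record per filesystem: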

      // Data
      ChukwaRecord record = null;

      for (int i = 1; i < lines.length; i++) {
        values = lines[i].split("[\\s]++");
        key = new ChukwaRecordKey();
        record = new ChukwaRecord();
        this.buildGenericRecord(record, null, d.getTime(), "Df");

        record.add(headerCols[0], values[0]);
        record.add(headerCols[1], values[1]);

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey
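This fragment is from a unit test: it runs a chunk through a mapper backed by a mock processor and asserts that exactly one record was collected under the expected key (note the two-argument constructor taking a reduce type and a key string):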

    ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord> output =
            new ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord>();

    mapper.map(new ChukwaArchiveKey(), chunk, output, Reporter.NULL);
    ChukwaRecordKey recordKey = new ChukwaRecordKey("someReduceType", SAMPLE_RECORD_DATA);

    assertEquals("MockMapProcessor never invoked - no records found", 1, output.data.size());
    assertNotNull("MockMapProcessor never invoked", output.data.get(recordKey));
  }