Examples of JobLogLine


Examples of org.apache.hadoop.chukwa.extraction.demux.processor.mapper.JobLog.JobLogLine

    }
  }

  public void testJobLogEntry() {
    JobLog jobLog = new JobLog();
    JobLogLine log = jobLog.getJobLogLine(testLogList.get(1));
    assertEquals("JobData", log.getLogType());
    assertEquals("hdfs://test33/tmp/hadoop-gmon/mapred/system/job_200903062215_0577/job\\.xml", log.get("JOBCONF"));
    assertEquals("job_200903062215_0577", log.get("JOBID"));
    assertEquals("grep-search", log.get("JOBNAME"));
    assertEquals("gmon", log.get("USER"));
    assertEquals("1236386525570", log.get("SUBMIT_TIME"));
    assertEquals(1236386525570l, log.getTimestamp());
   
    log = jobLog.getJobLogLine(testLogList.get(2));
    assertEquals(1236386525570l, log.getTimestamp());
   
    log = jobLog.getJobLogLine(testLogList.get(4));
    assertEquals("TaskData", log.getLogType());
    assertEquals("", log.get("SPLITS"));
    assertEquals(1236386529449l, log.getTimestamp());
   
    log = jobLog.getJobLogLine(testLogList.get(72));
    assertEquals("TaskData", log.getLogType());
    assertEquals("{(org\\.apache\\.hadoop\\.mapred\\.Task$Counter)(Map-Reduce Framework)[(REDUCE_INPUT_GROUPS)(Reduce input groups)(0)][(COMBINE_OUTPUT_RECORDS)(Combine output records)(0)][(REDUCE_SHUFFLE_BYTES)(Reduce shuffle bytes)(0)][(REDUCE_OUTPUT_RECORDS)(Reduce output records)(0)][(SPILLED_RECORDS)(Spilled Records)(0)][(COMBINE_INPUT_RECORDS)(Combine input records)(0)][(REDUCE_INPUT_RECORDS)(Reduce input records)(0)]}", log.get("COUNTERS"));
   
    log = jobLog.getJobLogLine(testLogList.get(73));
    HashMap<String, Long> counters = log.getCounterHash().flat();
    assertEquals("1", counters.get("Counter:org.apache.hadoop.mapred.JobInProgress$Counter:TOTAL_LAUNCHED_REDUCES").toString());
    assertEquals("20471", counters.get("Counter:FileSystemCounters:HDFS_BYTES_READ").toString());
   
    log = jobLog.getJobLogLine(testLogList.get(90));
    assertTrue("START_TIME should not exist", log.get("START_TIME")==null);

    log = jobLog.getJobLogLine("");
    assertTrue(log==null);
   
    log = jobLog.getJobLogLine("Job JOBID=\"job_200903042324_8630\" FINISH_TIME=\"1236527538594\" JOB_STATUS=\"SUCCESS\" FINISHED_MAPS=\"10\" FINISHED_REDUCES=\"8\" FAILED_MAPS=\"0\" FAILED_REDUCES=\"0\" COUNTERS=\"input records:0,Map-Reduce Framework.Reduce input records:57038\"");
   
    // print all key-values
    for(String line : testLogList) {
      log = jobLog.getJobLogLine(line);
      if(log == null) {
        continue;
      }
      System.out.println(log.getLogType());
      for(Entry<String, String> entry : log.entrySet()) {
        String k = entry.getKey();
        String v = entry.getValue();
        System.out.println(k + ": " + v);
        if(k.equals("START_TIME") || k.equals("FINISH_TIME"))
          assertTrue(v!=null && !v.equals("0"));
      }
     
      // list all counters for this entry
      for(Entry<String, Long> entry : log.getCounterHash().flat().entrySet()) {
        System.out.println(entry.getKey() + ": " + entry.getValue());
      }
     
      System.out.println();
    }
View Full Code Here

Examples of org.apache.hadoop.chukwa.extraction.demux.processor.mapper.JobLog.JobLogLine

    }
  }

  public void testJobLogEntry() {
    JobLog jobLog = new JobLog();
    JobLogLine log = jobLog.getJobLogLine(testLogList.get(1));
    assertEquals("JobData", log.getLogType());
    assertEquals("hdfs://test33/tmp/hadoop-gmon/mapred/system/job_200903062215_0577/job\\.xml", log.get("JOBCONF"));
    assertEquals("job_200903062215_0577", log.get("JOBID"));
    assertEquals("grep-search", log.get("JOBNAME"));
    assertEquals("gmon", log.get("USER"));
    assertEquals("1236386525570", log.get("SUBMIT_TIME"));
    assertEquals(1236386525570l, log.getTimestamp());
   
    log = jobLog.getJobLogLine(testLogList.get(2));
    assertEquals(1236386525570l, log.getTimestamp());
   
    log = jobLog.getJobLogLine(testLogList.get(4));
    assertEquals("TaskData", log.getLogType());
    assertEquals("", log.get("SPLITS"));
    assertEquals(1236386529449l, log.getTimestamp());
   
    log = jobLog.getJobLogLine(testLogList.get(72));
    assertEquals("TaskData", log.getLogType());
    assertEquals("{(org\\.apache\\.hadoop\\.mapred\\.Task$Counter)(Map-Reduce Framework)[(REDUCE_INPUT_GROUPS)(Reduce input groups)(0)][(COMBINE_OUTPUT_RECORDS)(Combine output records)(0)][(REDUCE_SHUFFLE_BYTES)(Reduce shuffle bytes)(0)][(REDUCE_OUTPUT_RECORDS)(Reduce output records)(0)][(SPILLED_RECORDS)(Spilled Records)(0)][(COMBINE_INPUT_RECORDS)(Combine input records)(0)][(REDUCE_INPUT_RECORDS)(Reduce input records)(0)]}", log.get("COUNTERS"));
   
    log = jobLog.getJobLogLine(testLogList.get(73));
    HashMap<String, Long> counters = log.getCounterHash().flat();
    assertEquals("1", counters.get("Counter:org.apache.hadoop.mapred.JobInProgress$Counter:TOTAL_LAUNCHED_REDUCES").toString());
    assertEquals("20471", counters.get("Counter:FileSystemCounters:HDFS_BYTES_READ").toString());
   
    log = jobLog.getJobLogLine(testLogList.get(90));
    assertTrue("START_TIME should not exist", log.get("START_TIME")==null);

    log = jobLog.getJobLogLine("");
    assertTrue(log==null);
   
    log = jobLog.getJobLogLine("Job JOBID=\"job_200903042324_8630\" FINISH_TIME=\"1236527538594\" JOB_STATUS=\"SUCCESS\" FINISHED_MAPS=\"10\" FINISHED_REDUCES=\"8\" FAILED_MAPS=\"0\" FAILED_REDUCES=\"0\" COUNTERS=\"input records:0,Map-Reduce Framework.Reduce input records:57038\"");
   
    // print all key-values
    for(String line : testLogList) {
      log = jobLog.getJobLogLine(line);
      if(log == null) {
        continue;
      }
      System.out.println(log.getLogType());
      for(Entry<String, String> entry : log.entrySet()) {
        String k = entry.getKey();
        String v = entry.getValue();
        System.out.println(k + ": " + v);
        if(k.equals("START_TIME") || k.equals("FINISH_TIME"))
          assertTrue(v!=null && !v.equals("0"));
      }
     
      // list all counters for this entry
      for(Entry<String, Long> entry : log.getCounterHash().flat().entrySet()) {
        System.out.println(entry.getKey() + ": " + entry.getValue());
      }
     
      System.out.println();
    }
View Full Code Here

Examples of org.apache.hadoop.chukwa.extraction.demux.processor.mapper.JobLog.JobLogLine

    }
  }

  public void testJobLogEntry() {
    JobLog jobLog = new JobLog();
    JobLogLine log = jobLog.getJobLogLine(testLogList.get(0));
    assertEquals("Meta", log.getLogType());
    assertEquals("1", log.get("VERSION"));
    assertEquals(0l, log.getTimestamp());
   
    log = jobLog.getJobLogLine(testLogList.get(1));
    assertEquals("JobData", log.getLogType());
    assertEquals("hdfs://test33/tmp/hadoop-gmon/mapred/system/job_200903062215_0577/job\\.xml", log.get("JOBCONF"));
    assertEquals("job_200903062215_0577", log.get("JOBID"));
    assertEquals("grep-search", log.get("JOBNAME"));
    assertEquals("gmon", log.get("USER"));
    assertEquals("1236386525570", log.get("SUBMIT_TIME"));
    assertEquals(1236386525570l, log.getTimestamp());
   
    log = jobLog.getJobLogLine(testLogList.get(2));
    assertEquals(1236386525570l, log.getTimestamp());
   
    log = jobLog.getJobLogLine(testLogList.get(4));
    assertEquals("TaskData", log.getLogType());
    assertEquals("", log.get("SPLITS"));
    assertEquals(1236386529449l, log.getTimestamp());
   
    log = jobLog.getJobLogLine(testLogList.get(72));
    assertEquals("TaskData", log.getLogType());
    assertEquals("{(org\\.apache\\.hadoop\\.mapred\\.Task$Counter)(Map-Reduce Framework)[(REDUCE_INPUT_GROUPS)(Reduce input groups)(0)][(COMBINE_OUTPUT_RECORDS)(Combine output records)(0)][(REDUCE_SHUFFLE_BYTES)(Reduce shuffle bytes)(0)][(REDUCE_OUTPUT_RECORDS)(Reduce output records)(0)][(SPILLED_RECORDS)(Spilled Records)(0)][(COMBINE_INPUT_RECORDS)(Combine input records)(0)][(REDUCE_INPUT_RECORDS)(Reduce input records)(0)]}", log.get("COUNTERS"));
   
    log = jobLog.getJobLogLine(testLogList.get(73));
    HashMap<String, Long> counters = log.getCounterHash().flat();
    assertEquals("1", counters.get("Counter:org.apache.hadoop.mapred.JobInProgress$Counter:TOTAL_LAUNCHED_REDUCES").toString());
    assertEquals("20471", counters.get("Counter:FileSystemCounters:HDFS_BYTES_READ").toString());
   
    log = jobLog.getJobLogLine("");
    assertTrue(log==null);
   
    log = jobLog.getJobLogLine("Job JOBID=\"job_200903042324_8630\" FINISH_TIME=\"1236527538594\" JOB_STATUS=\"SUCCESS\" FINISHED_MAPS=\"10\" FINISHED_REDUCES=\"8\" FAILED_MAPS=\"0\" FAILED_REDUCES=\"0\" COUNTERS=\"input records:0,Map-Reduce Framework.Reduce input records:57038\"");
   
    // print all key-values
    for(String line : testLogList) {
      log = jobLog.getJobLogLine(line);
      if(log == null) {
        continue;
      }
      System.out.println(log.getLogType());
      for(Entry<String, String> entry : log.entrySet()) {
        System.out.println(entry.getKey() + ": " + entry.getValue());
      }
     
      // list all counters for this entry
      for(Entry<String, Long> entry : log.getCounterHash().flat().entrySet()) {
        System.out.println(entry.getKey() + ": " + entry.getValue());
      }
     
      System.out.println();
    }
View Full Code Here

Examples of org.platformlayer.jobs.model.JobLogLine

    JobLogExceptionInfo jobLogExceptionInfo = null;
    if (exceptionStacks != null) {
      jobLogExceptionInfo = JobUtils.buildJobLogExceptionInfo(exceptionStacks);
    }

    JobLogLine jobLogLine = new JobLogLine(System.currentTimeMillis(), level, message, jobLogExceptionInfo);
    lines.add(jobLogLine);
  }
View Full Code Here

Examples of org.platformlayer.jobs.model.JobLogLine

  }

  @Override
  public void enterScope(Object controller) {
    String name = ControllerNameStrategy.getName(controller);
    JobLogLine line = new JobLogLine();
    line.type = JobLogLine.TYPE_ENTER_SCOPE;
    line.message = name;
    lines.add(line);
  }
View Full Code Here

Examples of org.platformlayer.jobs.model.JobLogLine

    lines.add(line);
  }

  @Override
  public void exitScope() {
    JobLogLine line = new JobLogLine();
    line.type = JobLogLine.TYPE_EXIT_SCOPE;
    lines.add(line);
  }
View Full Code Here

Examples of org.platformlayer.jobs.model.JobLogLine

          int oldLimit = in.pushLimit(length);

          protobuf.clear();
          protobuf.mergeFrom(in);

          JobLogLine line = new JobLogLine();
          line.level = protobuf.getLevel();
          line.timestamp = protobuf.getTimestamp();
          line.message = protobuf.getMessage();
          line.type = protobuf.getType();
          if (protobuf.hasException()) {
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.