Class org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser

Examples of org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo
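Before the captured snippets, a minimal self-contained sketch of the pattern they share: open a .jhist file, parse it into a JobInfo, and check getParseException() for a partial parse. The class name and file argument below are placeholders, not taken from any of the quoted projects.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
    import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;

    public class JhistSummary {
      public static void main(String[] args) throws Exception {
        // args[0]: path to a .jhist file on the local filesystem (placeholder).
        Path histPath = new Path(args[0]);
        JobHistoryParser parser = new JobHistoryParser(
            FileSystem.getLocal(new Configuration()), histPath);
        JobInfo jobInfo = parser.parse();
        // Mid-stream read errors are recorded rather than thrown: parse()
        // returns whatever it could read and reports the failure here.
        Exception parseException = parser.getParseException();
        if (parseException != null) {
          System.err.println("History parsed only partially: " + parseException);
        }
        System.out.println(jobInfo.getJobId() + " \"" + jobInfo.getJobname()
            + "\" " + jobInfo.getFinishedMaps() + "/" + jobInfo.getTotalMaps()
            + " maps finished");
      }
    }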


    {
      // Parse a history file written by Hadoop 2.0.3-alpha and log basic job info.
      Path histPath = new Path(getClass().getClassLoader().getResource(
          "job_2.0.3-alpha-FAILED.jhist").getFile());
      JobHistoryParser parser = new JobHistoryParser(
          FileSystem.getLocal(new Configuration()), histPath);
      JobInfo jobInfo = parser.parse();
      LOG.info("job info: " + jobInfo.getJobname() + " "
          + jobInfo.getFinishedMaps() + " "
          + jobInfo.getTotalMaps() + " "
          + jobInfo.getJobId());
    }
View Full Code Here


    {
      // Same check against a history file written by Hadoop 2.4.0.
      Path histPath = new Path(getClass().getClassLoader().getResource(
          "job_2.4.0-FAILED.jhist").getFile());
      JobHistoryParser parser = new JobHistoryParser(
          FileSystem.getLocal(new Configuration()), histPath);
      JobInfo jobInfo = parser.parse();
      LOG.info("job info: " + jobInfo.getJobname() + " "
          + jobInfo.getFinishedMaps() + " "
          + jobInfo.getTotalMaps() + " "
          + jobInfo.getJobId());
    }
View Full Code Here

    {
      // Same check against a history file written by Hadoop 0.23.9.
      Path histPath = new Path(getClass().getClassLoader().getResource(
          "job_0.23.9-FAILED.jhist").getFile());
      JobHistoryParser parser = new JobHistoryParser(
          FileSystem.getLocal(new Configuration()), histPath);
      JobInfo jobInfo = parser.parse();
      LOG.info("job info: " + jobInfo.getJobname() + " "
          + jobInfo.getFinishedMaps() + " "
          + jobInfo.getTotalMaps() + " "
          + jobInfo.getJobId());
    }
View Full Code Here


  @Test(timeout = 50000)
  public void testJobInfo() throws Exception {
    // A freshly constructed JobInfo reports the default NORMAL priority.
    JobInfo info = new JobInfo();
    Assert.assertEquals("NORMAL", info.getPriority());
    info.printAll();
  }
View Full Code Here


    // Locate the job's history file through the JobHistory service before parsing.
    JobHistory jobHistory = new JobHistory();
    jobHistory.init(conf);
    HistoryFileInfo fileInfo = jobHistory.getJobFileInfo(jobId);
    JobInfo jobInfo;
    long numFinishedMaps;

    synchronized (fileInfo) {
      Path historyFilePath = fileInfo.getHistoryFile();
      FSDataInputStream in = null;
      LOG.info("JobHistoryFile is: " + historyFilePath);
      try {
        in = fc.open(fc.makeQualified(historyFilePath));
      } catch (IOException ioe) {
        LOG.info("Cannot open history file: " + historyFilePath, ioe);
        throw new Exception("Cannot open history file");
      }

      JobHistoryParser parser = new JobHistoryParser(in);
      final EventReader realReader = new EventReader(in);
      EventReader reader = Mockito.mock(EventReader.class);
      if (numMaps == numSuccessfulMaps) {
        reader = realReader;
      } else {
        // Simulate a truncated history stream: replay events until all
        // successful maps have finished, then fail the next read.
        final AtomicInteger numFinishedEvents = new AtomicInteger(0);
        Mockito.when(reader.getNextEvent()).thenAnswer(
            new Answer<HistoryEvent>() {
              public HistoryEvent answer(InvocationOnMock invocation)
                  throws IOException {
                HistoryEvent event = realReader.getNextEvent();
                if (event instanceof TaskFinishedEvent) {
                  numFinishedEvents.incrementAndGet();
                }

                if (numFinishedEvents.get() <= numSuccessfulMaps) {
                  return event;
                } else {
                  throw new IOException("test");
                }
              }
            });
      }

      jobInfo = parser.parse(reader);

      numFinishedMaps = computeFinishedMaps(jobInfo, numMaps, numSuccessfulMaps);

      if (numFinishedMaps != numMaps) {
        Exception parseException = parser.getParseException();
        Assert.assertNotNull("Didn't get expected parse exception",
            parseException);
      }
    }

    Assert.assertEquals("Incorrect username ", System.getProperty("user.name"),
        jobInfo.getUsername());
    Assert.assertEquals("Incorrect jobName ", "test", jobInfo.getJobname());
    Assert.assertEquals("Incorrect queuename ", "default",
        jobInfo.getJobQueueName());
    Assert
        .assertEquals("incorrect conf path", "test", jobInfo.getJobConfPath());
    Assert.assertEquals("incorrect finishedMap ", numSuccessfulMaps,
        numFinishedMaps);
    Assert.assertEquals("incorrect finishedReduces ", numReduces,
        jobInfo.getFinishedReduces());
    Assert.assertEquals("incorrect uberized ", job.isUber(),
        jobInfo.getUberized());
    Map<TaskID, TaskInfo> allTasks = jobInfo.getAllTasks();
    int totalTasks = allTasks.size();
    Assert.assertEquals("total number of tasks is incorrect  ",
        (numMaps + numReduces), totalTasks);

    // Verify aminfo
    Assert.assertEquals(1, jobInfo.getAMInfos().size());
    Assert.assertEquals(MRApp.NM_HOST, jobInfo.getAMInfos().get(0)
        .getNodeManagerHost());
    AMInfo amInfo = jobInfo.getAMInfos().get(0);
    Assert.assertEquals(MRApp.NM_PORT, amInfo.getNodeManagerPort());
    Assert.assertEquals(MRApp.NM_HTTP_PORT, amInfo.getNodeManagerHttpPort());
    Assert.assertEquals(1, amInfo.getAppAttemptId().getAttemptId());
    Assert.assertEquals(amInfo.getAppAttemptId(), amInfo.getContainerId()
        .getApplicationAttemptId());
View Full Code Here
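The assertions in the snippet above call a computeFinishedMaps helper that falls outside the captured window. A sketch consistent with how it is used here, assuming it trusts JobInfo's own counter when the stream was read in full and otherwise counts the SUCCEEDED tasks recovered before the stream broke — this reconstruction is an assumption, not the quoted source:

      // Hypothetical reconstruction of the helper referenced above.
      private static long computeFinishedMaps(JobInfo jobInfo,
          int numMaps, int numSuccessfulMaps) {
        if (numMaps == numSuccessfulMaps) {
          // Full history stream: JobInfo's own count is reliable.
          return jobInfo.getFinishedMaps();
        }
        // Truncated stream: count the tasks that made it in as SUCCEEDED.
        long numFinishedMaps = 0;
        for (TaskInfo taskInfo : jobInfo.getAllTasks().values()) {
          if (TaskState.SUCCEEDED.toString().equals(taskInfo.getTaskStatus())) {
            ++numFinishedMaps;
          }
        }
        return numFinishedMaps;
      }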

  // Recover task state from the previous application attempt's history file.
  private void parsePreviousJobHistory() throws IOException {
    FSDataInputStream in = getPreviousJobHistoryStream(getConfig(),
        appAttemptID);
    JobHistoryParser parser = new JobHistoryParser(in);
    JobInfo jobInfo = parser.parse();
    Exception parseException = parser.getParseException();
    if (parseException != null) {
      LOG.info("Got an error parsing job-history file" +
          ", ignoring incomplete events.", parseException);
    }
    Map<org.apache.hadoop.mapreduce.TaskID, TaskInfo> taskInfos = jobInfo
        .getAllTasks();
    for (TaskInfo taskInfo : taskInfos.values()) {
      if (TaskState.SUCCEEDED.toString().equals(taskInfo.getTaskStatus())) {
        // Drop attempts that never completed; only finished attempts can be
        // recovered from the previous run.
        Iterator<Entry<TaskAttemptID, TaskAttemptInfo>> taskAttemptIterator =
            taskInfo.getAllTaskAttempts().entrySet().iterator();
        while (taskAttemptIterator.hasNext()) {
          Map.Entry<TaskAttemptID, TaskAttemptInfo> currentEntry = taskAttemptIterator.next();
          if (!jobInfo.getAllCompletedTaskAttempts().containsKey(currentEntry.getKey())) {
            taskAttemptIterator.remove();
          }
        }
        completedTasksFromPreviousRun
            .put(TypeConverter.toYarn(taskInfo.getTaskId()), taskInfo);
        LOG.info("Read from history task "
            + TypeConverter.toYarn(taskInfo.getTaskId()));
      }
    }
    LOG.info("Read completed tasks from history "
        + completedTasksFromPreviousRun.size());
    recoveredJobStartTime = jobInfo.getLaunchTime();

    // recover AMInfos
    List<JobHistoryParser.AMInfo> jhAmInfoList = jobInfo.getAMInfos();
    if (jhAmInfoList != null) {
      for (JobHistoryParser.AMInfo jhAmInfo : jhAmInfoList) {
        AMInfo amInfo = MRBuilderUtils.newAMInfo(jhAmInfo.getAppAttemptId(),
            jhAmInfo.getStartTime(), jhAmInfo.getContainerId(),
            jhAmInfo.getNodeManagerHost(), jhAmInfo.getNodeManagerPort(),
View Full Code Here


      LOG.info("Cannot open history file: " + historyFilePath, ioe);
      throw new Exception("Cannot open history file");
    }

    JobHistoryParser parser = new JobHistoryParser(in);
    JobInfo jobInfo = parser.parse();
    Exception parseException = parser.getParseException();
    Assert.assertNull("Caught an unexpected exception " + parseException,
        parseException);
    int noOfFailedAttempts = 0;
    Map<TaskID, TaskInfo> allTasks = jobInfo.getAllTasks();
    for (Task task : job.getTasks().values()) {
      TaskInfo taskInfo = allTasks.get(TypeConverter.fromYarn(task.getID()));
      for (TaskAttempt taskAttempt : task.getAttempts().values()) {
        TaskAttemptInfo taskAttemptInfo = taskInfo.getAllTaskAttempts().get(
            TypeConverter.fromYarn(taskAttempt.getID()));
View Full Code Here

    // Servlet code generated from a job-history JSP: renders an HTML summary
    // of the parsed job.
    final Path jobFile = new Path(logFile);
    String jobid = JobHistory.getJobIDFromHistoryFilePath(jobFile).toString();

    final FileSystem fs = (FileSystem) application.getAttribute("fileSys");
    final JobTracker jobTracker = (JobTracker) application.getAttribute("job.tracker");
    JobInfo job = JSPUtil.checkAccessAndGetJobInfo(request, response,
        jobTracker, fs, jobFile);
    if (job == null) {
      return;
    }
    if (job.getJobStatus().equals("FAILED")) {
      reasonforFailure = job.getErrorInfo();
    }

      out.write("\n\n<html>\n<head>\n<title>Hadoop Job ");
      out.print(jobid);
      out.write(" on History Viewer</title>\n<link rel=\"stylesheet\" type=\"text/css\" href=\"/static/hadoop.css\">\n</head>\n<body>\n\n<h2>Hadoop Job ");
      out.print(jobid );
      out.write(" on <a href=\"jobhistory.jsp\">History Viewer</a></h2>\n\n<b>User: </b> ");
      out.print(HtmlQuoting.quoteHtmlChars(job.getUsername()) );
      out.write("<br/>\n<b>JobName: </b> ");
      out.print(HtmlQuoting.quoteHtmlChars(job.getJobname()) );
      out.write("<br/>\n<b>JobConf: </b> <a href=\"jobconf_history.jsp?logFile=");
      out.print(logFile);
      out.write("\"> \n                 ");
      out.print(job.getJobConfPath() );
      out.write("</a><br/> \n");
        
  Map<JobACL, AccessControlList> jobAcls = job.getJobACLs();
  JSPUtil.printJobACLs(jobTracker, jobAcls, out);

      out.write("\n<b>Submitted At: </b> ");
      out.print(StringUtils.getFormattedTimeWithDiff(dateFormat, job.getSubmitTime(), 0 )  );
      out.write("<br/> \n<b>Launched At: </b> ");
      out.print(StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLaunchTime(), job.getSubmitTime()) );
      out.write("<br/>\n<b>Finished At: </b>  ");
      out.print(StringUtils.getFormattedTimeWithDiff(dateFormat, job.getFinishTime(), job.getLaunchTime()) );
      out.write("<br/>\n<b>Status: </b> ");
      out.print(job.getJobStatus() == null ? "Incomplete" : job.getJobStatus());
      out.write("<br/> \n<b>ReasonForFailure: </b> ");
      out.print(reasonforFailure );
      out.write("<br/>\n");

    HistoryViewer.SummarizedJob sj = new HistoryViewer.SummarizedJob(job);

      out.write("\n<b><a href=\"analysejobhistory.jsp?logFile=");
      out.print(logFile);
      out.write("\">Analyse This Job</a></b> \n<hr/>\n<center>\n<table border=\"2\" cellpadding=\"5\" cellspacing=\"2\">\n<tr>\n<td>Kind</td><td>Total Tasks(successful+failed+killed)</td><td>Successful tasks</td><td>Failed tasks</td><td>Killed tasks</td><td>Start Time</td><td>Finish Time</td>\n</tr>\n<tr>\n<td>Setup</td>\n    <td><a href=\"jobtaskshistory.jsp?logFile=");
      out.print(logFile);
      out.write("&taskType=JOB_SETUP&status=all\">\n        ");
      out.print(sj.getTotalSetups());
      out.write("</a></td>\n    <td><a href=\"jobtaskshistory.jsp?logFile=");
      out.print(logFile);
      out.write("&taskType=JOB_SETUP&status=SUCCEEDED\">\n        ");
      out.print(sj.getNumFinishedSetups());
      out.write("</a></td>\n    <td><a href=\"jobtaskshistory.jsp?logFile=");
      out.print(logFile);
      out.write("&taskType=JOB_SETUP&status=FAILED\">\n        ");
      out.print(sj.getNumFailedSetups());
      out.write("</a></td>\n    <td><a href=\"jobtaskshistory.jsp?logFile=");
      out.print(logFile);
      out.write("&taskType=JOB_SETUP&status=KILLED\">\n        ");
      out.print(sj.getNumKilledSetups());
      out.write("</a></td>  \n    <td>");
      out.print(StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getSetupStarted(), 0) );
      out.write("</td>\n    <td>");
      out.print(StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getSetupFinished(), sj.getSetupStarted()) );
      out.write("</td>\n</tr>\n<tr>\n<td>Map</td>\n    <td><a href=\"jobtaskshistory.jsp?logFile=");
      out.print(logFile);
      out.write("&taskType=MAP&status=all\">\n        ");
      out.print(sj.getTotalMaps());
      out.write("</a></td>\n    <td><a href=\"jobtaskshistory.jsp?logFile=");
      out.print(logFile);
      out.write("&taskType=MAP&status=SUCCEEDED\">\n        ");
      out.print(job.getFinishedMaps() );
      out.write("</a></td>\n    <td><a href=\"jobtaskshistory.jsp?logFile=");
      out.print(logFile);
      out.write("&taskType=MAP&status=FAILED\">\n        ");
      out.print(sj.getNumFailedMaps());
      out.write("</a></td>\n    <td><a href=\"jobtaskshistory.jsp?logFile=");
      out.print(logFile);
      out.write("&taskType=MAP&status=KILLED\">\n        ");
      out.print(sj.getNumKilledMaps());
      out.write("</a></td>\n    <td>");
      out.print(StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getMapStarted(), 0) );
      out.write("</td>\n    <td>");
      out.print(StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getMapFinished(), sj.getMapStarted()) );
      out.write("</td>\n</tr>\n<tr>\n<td>Reduce</td>\n    <td><a href=\"jobtaskshistory.jsp?logFile=");
      out.print(logFile);
      out.write("&taskType=REDUCE&status=all\">\n        ");
      out.print(sj.getTotalReduces());
      out.write("</a></td>\n    <td><a href=\"jobtaskshistory.jsp?logFile=");
      out.print(logFile);
      out.write("&taskType=REDUCE&status=SUCCEEDED\">\n        ");
      out.print(job.getFinishedReduces());
      out.write("</a></td>\n    <td><a href=\"jobtaskshistory.jsp?logFile=");
      out.print(logFile);
      out.write("&taskType=REDUCE&status=FAILED\">\n        ");
      out.print(sj.getNumFailedReduces());
      out.write("</a></td>\n    <td><a href=\"jobtaskshistory.jsp?logFile=");
      out.print(logFile);
      out.write("&taskType=REDUCE&status=KILLED\">\n        ");
      out.print(sj.getNumKilledReduces());
      out.write("</a></td>  \n    <td>");
      out.print(StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getReduceStarted(), 0) );
      out.write("</td>\n    <td>");
      out.print(StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getReduceFinished(), sj.getReduceStarted()) );
      out.write("</td>\n</tr>\n<tr>\n<td>Cleanup</td>\n    <td><a href=\"jobtaskshistory.jsp?logFile=");
      out.print(logFile);
      out.write("&taskType=JOB_CLEANUP&status=all\">\n        ");
      out.print(sj.getTotalCleanups());
      out.write("</a></td>\n    <td><a href=\"jobtaskshistory.jsp?logFile=");
      out.print(logFile);
      out.write("&taskType=JOB_CLEANUP&status=SUCCEEDED\">\n        ");
      out.print(sj.getNumFinishedCleanups());
      out.write("</a></td>\n    <td><a href=\"jobtaskshistory.jsp?logFile=");
      out.print(logFile);
      out.write("&taskType=JOB_CLEANUP&status=FAILED\">\n        ");
      out.print(sj.getNumFailedCleanups());
      out.write("</a></td>\n    <td><a href=\"jobtaskshistory.jsp?logFile=");
      out.print(logFile);
      out.write("&taskType=JOB_CLEANUP&status=KILLED\">\n        ");
      out.print(sj.getNumKilledCleanups());
      out.write("</a></td>  \n    <td>");
      out.print(StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getCleanupStarted(), 0) );
      out.write("</td>\n    <td>");
      out.print(StringUtils.getFormattedTimeWithDiff(dateFormat, sj.getCleanupFinished(), sj.getCleanupStarted()) );
      out.write("</td>\n</tr>\n</table>\n\n<br>\n<br>\n\n<table border=2 cellpadding=\"5\" cellspacing=\"2\">\n  <tr>\n  <th><br/></th>\n  <th>Counter</th>\n  <th>Map</th>\n  <th>Reduce</th>\n  <th>Total</th>\n</tr>\n\n");

Counters totalCounters = job.getTotalCounters();
Counters mapCounters = job.getMapCounters();
Counters reduceCounters = job.getReduceCounters();

if (totalCounters != null) {
   for (String groupName : totalCounters.getGroupNames()) {
     CounterGroup totalGroup = totalCounters.getGroup(groupName);
     CounterGroup mapGroup = mapCounters.getGroup(groupName);
View Full Code Here
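The window above cuts off inside the counter loop. As a standalone reference, a sketch of walking parsed counters with the org.apache.hadoop.mapreduce counter API, printing plain text instead of the page's HTML — this is not the JSP's own continuation:

    for (String groupName : totalCounters.getGroupNames()) {
      CounterGroup totalGroup = totalCounters.getGroup(groupName);
      CounterGroup mapGroup = mapCounters.getGroup(groupName);
      CounterGroup reduceGroup = reduceCounters.getGroup(groupName);
      for (Counter counter : totalGroup) {
        // Look up per-phase values by counter name; the display name is output only.
        String name = counter.getName();
        System.out.println(groupName + "\t" + counter.getDisplayName() + "\t"
            + mapGroup.findCounter(name).getValue() + "\t"
            + reduceGroup.findCounter(name).getValue() + "\t"
            + counter.getValue());
      }
    }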

