Package org.apache.hadoop.mapred

Examples of org.apache.hadoop.mapred.TaskReport
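The fragments below are excerpts drawn from open-source projects that consume this class, apparently including Apache Pig and Cascading. Because they are excerpts, surrounding declarations are occasionally missing; where the context makes them clear, they are restored with a comment saying so.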



    /** Sums wall-clock time (finish minus start) across the given task reports. */
    protected long computeTimeSpent(Iterator<TaskReport> taskReports) {
        long timeSpent = 0;
        while (taskReports.hasNext()) {
            TaskReport r = taskReports.next();
            timeSpent += (r.getFinishTime() - r.getStartTime());
        }
        return timeSpent;
    }
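A minimal sketch of driving this helper through the classic JobClient API; the job ID string is a placeholder and the surrounding setup is assumed (org.apache.hadoop.mapred.JobClient, JobConf, JobID, plus java.util.Arrays):

    // Hypothetical caller: sum wall-clock time across a job's map tasks.
    JobClient client = new JobClient(new JobConf());
    TaskReport[] mapReports =
        client.getMapTaskReports(JobID.forName("job_201801010000_0001")); // placeholder ID
    long spentMs = computeTimeSpent(Arrays.asList(mapReports).iterator());
    System.out.println("Total map task time (ms): " + spentMs);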



    protected void getErrorMessages(Iterator<TaskReport> reports, String type,
            boolean errNotDbg, PigContext pigContext) throws Exception {
        while(reports.hasNext()) {
            TaskReport report = reports.next();
            String[] msgs = report.getDiagnostics();
            ArrayList<Exception> exceptions = new ArrayList<Exception>();
            String exceptionCreateFailMsg = null;
            boolean jobFailed = false;
            if (msgs.length > 0) {
                if (HadoopShims.isJobFailed(report)) {
                    jobFailed = true;
                }
                Set<String> errorMessageSet = new HashSet<String>();
                for (int j = 0; j < msgs.length; j++) {
                    if (!errorMessageSet.contains(msgs[j])) {
                        errorMessageSet.add(msgs[j]);
                        if (errNotDbg) {
                            // errNotDbg is used only for failed jobs
                            // keep track of all the unique exceptions
                            try {
                                LogUtils.writeLog("Backend error message",
                                        msgs[j], pigContext.getProperties()
                                                .getProperty("pig.logfile"),
                                        log);
                                Exception e = getExceptionFromString(msgs[j]);
                                exceptions.add(e);
                            } catch (Exception e1) {
                                exceptionCreateFailMsg = msgs[j];

                            }
                        } else {
                            log.debug("Error message from task (" + type + ") "
                                    + report.getTaskID() + ": " + msgs[j]);
                        }
                    }
                }
            }
            // if no valid exception could be created from the backend
            // message, report the failure as a bug
            if (jobFailed && (exceptions.size() == 0) && (exceptionCreateFailMsg != null)) {
                int errCode = 2997;
                String msg = "Unable to recreate exception from backed error: "
                        + exceptionCreateFailMsg;
                throw new ExecException(msg, errCode, PigException.BUG);
            }

            // If the job failed and there is more than one exception, the
            // failures may be of different kinds. Log each of them, then
            // throw the exception corresponding to the first failure.
            if (jobFailed) {
                if (exceptions.size() > 1) {
                    for (int j = 0; j < exceptions.size(); ++j) {
                        String headerMessage = "Error message from task ("
                                + type + ") " + report.getTaskID();
                        LogUtils.writeLog(exceptions.get(j), pigContext
                                .getProperties().getProperty("pig.logfile"),
                                log, false, headerMessage, false, false);
                    }
                    throw exceptions.get(0);
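The method above is cut off before its single-exception and no-exception branches. For merely inspecting failures, a much smaller sketch suffices (client and jobId are assumed to exist; no Pig machinery is required):

    // Print every diagnostic string attached to a job's map tasks.
    for (TaskReport report : client.getMapTaskReports(jobId)) {
        for (String diagnostic : report.getDiagnostics()) {
            System.err.println(report.getTaskID() + ": " + diagnostic);
        }
    }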

        // Fragment: max, min, and size are declared just above this excerpt
        // in the original; plausible initializations are restored here.
        long max = 0;
        long min = Long.MAX_VALUE;
        int size = 0;
        long median = 0;
        long total = 0;
        List<Long> durations = new ArrayList<Long>();

        while(tasks.hasNext()){
            TaskReport rpt = tasks.next();
            long duration = rpt.getFinishTime() - rpt.getStartTime();
            durations.add(duration);
            max = (duration > max) ? duration : max;
            min = (duration < min) ? duration : min;
            total += duration;
            size++;
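This iterator-based variant is truncated before the aggregates are produced; by symmetry with the array-based variants below, it presumably finishes along these lines (a sketch, not the original code):

        // Finish the aggregation started above; durations, total, size,
        // and median are the variables already in scope.
        long avg = (size == 0) ? -1 : total / size;
        Collections.sort(durations);  // java.util.Collections
        median = durations.isEmpty() ? 0 : durations.get(durations.size() / 2);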

        // Fragment: max, min, size, and the tasks array are declared just
        // above this excerpt in the original (size == tasks.length).
        long max = 0;
        long min = Long.MAX_VALUE;
        long median = 0;
        long total = 0;
        long[] durations = new long[size];

        for (int i = 0; i < tasks.length; i++) {
            TaskReport rpt = tasks[i];
            long duration = rpt.getFinishTime() - rpt.getStartTime();
            durations[i] = duration;
            max = (duration > max) ? duration : max;
            min = (duration < min) ? duration : min;
            total += duration;
        }
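Both this variant and the next excerpt rely on a calculateMedianValue helper that none of the snippets include. A plausible reconstruction, assuming it returns the upper median of the sorted durations:

        // Hypothetical reconstruction of the helper: sorts a defensive copy
        // and returns the middle element (upper median for even lengths).
        private static long calculateMedianValue(long[] durations) {
            if (durations.length == 0) {
                return 0;
            }
            long[] sorted = durations.clone();
            java.util.Arrays.sort(sorted);
            return sorted[sorted.length / 2];
        }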

            // Fragment: this block sits inside a null/length check on the
            // map-task reports (see the matching else branch below).
            long median = 0;
            long total = 0;
            long[] durations = new long[size];

            for (int i = 0; i < maps.length; i++) {
                TaskReport rpt = maps[i];
                long duration = rpt.getFinishTime() - rpt.getStartTime();
                durations[i] = duration;
                max = (duration > max) ? duration : max;
                min = (duration < min) ? duration : min;
                total += duration;
            }
            long avg = total / size;
           
            median = calculateMedianValue(durations);
            setMapStat(size, max, min, avg, median);
        } else {
            int m = conf.getInt("mapred.map.tasks", 1);
            if (m > 0) {
                setMapStat(m, -1, -1, -1, -1);
            }
        }
       
        TaskReport[] reduces = null;
        try {
            reduces = client.getReduceTaskReports(jobId);
        } catch (IOException e) {
            LOG.warn("Failed to get reduce task report", e);
        }
        if (reduces != null && reduces.length > 0) {
            int size = reduces.length;
            long max = 0;
            long min = Long.MAX_VALUE;
            long median = 0;
            long total = 0;
            long[] durations = new long[size];
           
            for (int i = 0; i < reduces.length; i++) {
                TaskReport rpt = reduces[i];
                long duration = rpt.getFinishTime() - rpt.getStartTime();
                durations[i] = duration;
                max = (duration > max) ? duration : max;
                min = (duration < min) ? duration : min;
                total += duration;
            }
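The excerpt stops just before the reduce-side aggregation completes. By symmetry with the map branch above, it presumably ends as follows (setReduceStat is an assumption mirroring setMapStat):

            // Hypothetical completion, mirroring the map branch:
            long avg = total / size;
            median = calculateMedianValue(durations);
            setReduceStat(size, max, min, avg, median);  // assumed counterpart of setMapStat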


      // Fragment: mapExecutionTimes is declared just above this excerpt in
      // the original; a plausible declaration is restored here.
      long[] mapExecutionTimes = new long[maps.length];
      long[] reduceExecutionTimes = new long[reduces.length];
      long startTime = System.currentTimeMillis(); // seeded with "now"; lowered by the scans below
      long finishTime = 0;
      for (int j = 0; j < maps.length; j++) {
        TaskReport map = maps[j];
        long thisStartTime = map.getStartTime();
        long thisFinishTime = map.getFinishTime();
        if (thisStartTime > 0 && thisFinishTime > 0) {
          mapExecutionTimes[j] = thisFinishTime - thisStartTime;
        }
        if (startTime > thisStartTime) {
          startTime = thisStartTime;
        }
        if (finishTime < thisFinishTime) {
          finishTime = thisFinishTime;
        }
      }

      theTaskExecutionStats.computeStats("mapExecutionTimeStats",
          mapExecutionTimes);

      retv.put(mapreduceID + "." + jobName + "." + "mapStartTime", ""
          + startTime);
      retv.put(mapreduceID + "." + jobName + "." + "mapEndTime", ""
          + finishTime);
      for (int j = 0; j < reduces.length; j++) {
        TaskReport reduce = reduces[j];
        long thisStartTime = reduce.getStartTime();
        long thisFinishTime = reduce.getFinishTime();
        if (thisStartTime > 0 && thisFinishTime > 0) {
          reduceExecutionTimes[j] = thisFinishTime - thisStartTime;
        }
        if (startTime > thisStartTime) {
          startTime = thisStartTime;
        }
        // ... loop truncated here in the original excerpt

    Collection<FlowSliceStats> children = flowNodeStats.getChildren();

    for( FlowSliceStats sliceStats : children )
      {
      TaskReport taskReport = ( (HadoopSliceStats) sliceStats ).getTaskReport();

      Counters counters = taskReport.getCounters();

      for( Counters.Group group : counters )
        {
        Map<String, Long> values = allCounters.get( group.getName() );
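The loop above is cut off inside the counter groups. A self-contained sketch of the same flattening, using only the classic mapred Counters API (allCounters is assumed to be a nested map, as the excerpt suggests):

    // Collect every counter from one TaskReport into
    // group name -> (counter name -> value).
    Map<String, Map<String, Long>> allCounters =
        new HashMap<String, Map<String, Long>>();
    for (Counters.Group group : taskReport.getCounters()) {
      Map<String, Long> values = new HashMap<String, Long>();
      for (Counters.Counter counter : group) {
        values.put(counter.getName(), counter.getCounter());
      }
      allCounters.put(group.getName(), values);
    }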

    // Fragment from a method body (the original source's indented brace
    // style is preserved); the signature sits just above this excerpt.
    {
    synchronized( sliceStatsMap )
      {
      for( int i = 0; i < taskReports.length - ( skipLast ? 1 : 0 ); i++ )
        {
        TaskReport taskReport = taskReports[ i ];

        if( taskReport == null )
          {
          LOG.warn( "found empty task report" );
          continue;
          }

        String id = getSliceIDFor( taskReport.getTaskID() );
        sliceStatsMap.put( id, new HadoopSliceStats( id, getParentStatus(), kind, taskReport ) );
        }
      }
    }
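A hypothetical caller for the method above; the method name addTaskStats and the Kind constant are assumptions inferred from the snippet's parameters, and skipLast plausibly drops a trailing job-cleanup report:

    // Assumed usage: populate the slice map from reduce-task reports,
    // skipping the last (cleanup) report.
    addTaskStats( HadoopSliceStats.Kind.REDUCER,
        jobClient.getReduceTaskReports( runningJob.getID() ), true );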


