Package org.apache.hadoop.mapred

Examples of org.apache.hadoop.mapred.TaskAttemptID
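
A quick refresher before the excerpts: a TaskAttemptID identifies a single attempt of a single task, and its string form follows the documented layout attempt_<jtIdentifier>_<jobId>_<m|r>_<taskId>_<attemptId>. A minimal sketch (not taken from the excerpts below) of constructing an ID and round-tripping it through forName():

    import org.apache.hadoop.mapred.TaskAttemptID;
    import org.apache.hadoop.mapreduce.TaskType;

    public class TaskAttemptIdRoundTrip {
      public static void main(String[] args) {
        // First attempt of map task 5 of job 3 on tracker "200707121733".
        TaskAttemptID id =
            new TaskAttemptID("200707121733", 3, TaskType.MAP, 5, 0);
        String s = id.toString();  // attempt_200707121733_0003_m_000005_0
        // forName() parses the canonical string form back into an ID.
        TaskAttemptID parsed = TaskAttemptID.forName(s);
        System.out.println(s + " equals round-trip: " + parsed.equals(id));
      }
    }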


    HiveApiOutputFormat outputFormat = new HiveApiOutputFormat();
    outputFormat.setMyProfileId(PROFILE_ID);

    JobConf jobConf = new JobConf(conf);
    TaskAttemptContext taskContext =
        new HackTaskAttemptContext(jobConf, new TaskAttemptID());
    JobContext jobContext = new HackJobContext(jobConf, taskContext.getJobID());
    // checkOutputSpecs() is expected to throw for this profile; reaching
    // fail() means the invalid output specification went undetected.
    outputFormat.checkOutputSpecs(jobContext);
    fail();
  }
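
The excerpt above relies on JUnit's fail(): execution only reaches it if checkOutputSpecs() does not throw. The same expected-failure pattern with an explicit try/catch, assuming the bad specification is signalled with an IOException:

    try {
      outputFormat.checkOutputSpecs(jobContext);
      fail("checkOutputSpecs should have rejected the output specification");
    } catch (IOException expected) {
      // expected path: the invalid output specification was detected
    }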


      // In case of DefaultTaskController, set permissions here.
      FileUtil.chmod(executable, "u+x");
    }
    cmd.add(executable);
    // wrap the command in a stdout/stderr capture
    TaskAttemptID taskid = TaskAttemptID.forName(conf.get("mapred.task.id"));
    // we are starting map/reduce task of the pipes job. this is not a cleanup
    // attempt.
    File stdout = TaskLog.getTaskLogFile(taskid, false, TaskLog.LogName.STDOUT);
    File stderr = TaskLog.getTaskLogFile(taskid, false, TaskLog.LogName.STDERR);
    long logLength = TaskLog.getTaskLogLength(conf);
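
The excerpt reads the attempt ID from the legacy key "mapred.task.id". Hadoop 2 exposes the same value as MRJobConfig.TASK_ATTEMPT_ID ("mapreduce.task.attempt.id") and treats the old key as a deprecated alias. A small lookup sketch covering both (the class name is illustrative):

    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.TaskAttemptID;
    import org.apache.hadoop.mapreduce.MRJobConfig;

    public class TaskIdLookup {
      // Resolve the current attempt ID, falling back to the legacy key.
      static TaskAttemptID currentAttempt(JobConf conf) {
        String id = conf.get(MRJobConfig.TASK_ATTEMPT_ID,
                             conf.get("mapred.task.id"));
        return id == null ? null : TaskAttemptID.forName(id);
      }
    }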

  /**
   * Clean up the stdout/stderr log files left over from previous runs.
   */
  private void initStdOut(JobConf configuration) {
    TaskAttemptID taskId = TaskAttemptID.forName(configuration
            .get(MRJobConfig.TASK_ATTEMPT_ID));
    File stdOut = TaskLog.getTaskLogFile(taskId, false, TaskLog.LogName.STDOUT);
    File stdErr = TaskLog.getTaskLogFile(taskId, false, TaskLog.LogName.STDERR);
    // prepare folder
    if (!stdOut.getParentFile().exists()) {
      stdOut.getParentFile().mkdirs();
    } else { // clean up log files from a previous run
      stdOut.deleteOnExit();
      stdErr.deleteOnExit();
    }
  }

  private String readStdOut(JobConf conf) throws Exception {
    TaskAttemptID taskId = TaskAttemptID.forName(conf
            .get(MRJobConfig.TASK_ATTEMPT_ID));
    File stdOut = TaskLog.getTaskLogFile(taskId, false, TaskLog.LogName.STDOUT);

    // readFile(...) is a helper defined elsewhere in the test class.
    return readFile(stdOut);
  }
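
readFile(...) is not shown in the excerpt; a hypothetical stand-in for such a helper could look like this:

    import java.io.BufferedReader;
    import java.io.File;
    import java.io.FileReader;
    import java.io.IOException;

    final class LogFiles {
      // Hypothetical stand-in for the readFile(...) helper used above:
      // slurp a small log file into a String.
      static String readFile(File file) throws IOException {
        StringBuilder result = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(new FileReader(file))) {
          String line;
          while ((line = reader.readLine()) != null) {
            result.append(line).append('\n');
          }
        }
        return result.toString();
      }
    }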

    if (System.getProperty("hadoop.log.dir") == null
        && System.getenv("HADOOP_LOG_DIR") != null)
      System.setProperty("hadoop.log.dir", System.getenv("HADOOP_LOG_DIR"));

    // wrap the command in a stdout/stderr capture
    TaskAttemptID taskid = TaskAttemptID.forName(job.get("mapred.task.id"));
    File stdout = TaskLog.getTaskLogFile(taskid, TaskLog.LogName.STDOUT);
    File stderr = TaskLog.getTaskLogFile(taskid, TaskLog.LogName.STDERR);
    long logLength = TaskLog.getTaskLogLength(job);
    command = TaskLog.captureOutAndError(command, stdout, stderr, logLength);
    // stdout and stderr live in the same attempt log directory, so one
    // mkdirs() call creates the parent directory for both files.
    stdout.getParentFile().mkdirs();
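
TaskLog.captureOutAndError rewrites the command so that the child's streams land in the task log files, truncated to logLength. Roughly what that achieves, expressed with plain ProcessBuilder redirection (a sketch that ignores the log-length cap, not Hadoop's actual shell wrapper):

    import java.io.File;
    import java.io.IOException;
    import java.util.List;

    public class CaptureSketch {
      // Launch the child with stdout/stderr appended to the task log files.
      static Process launch(List<String> cmd, File stdout, File stderr)
          throws IOException {
        stdout.getParentFile().mkdirs();
        stderr.getParentFile().mkdirs();
        return new ProcessBuilder(cmd)
            .redirectOutput(ProcessBuilder.Redirect.appendTo(stdout))
            .redirectError(ProcessBuilder.Redirect.appendTo(stderr))
            .start();
      }
    }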

    }
    cmd.add(executable);
    // wrap the command in a stdout/stderr capture
    // we are starting map/reduce task of the pipes job. this is not a cleanup
    // attempt.
    TaskAttemptID taskid =
      TaskAttemptID.forName(conf.get(MRJobConfig.TASK_ATTEMPT_ID));
    File stdout = TaskLog.getTaskLogFile(taskid, false, TaskLog.LogName.STDOUT);
    File stderr = TaskLog.getTaskLogFile(taskid, false, TaskLog.LogName.STDERR);
    long logLength = TaskLog.getTaskLogLength(conf);
    cmd = TaskLog.captureOutAndError(null, cmd, stdout, stderr, logLength,
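
The call above is truncated in the excerpt. Unlike the four-argument captureOutAndError overload used earlier, this longer overload takes a list of setup commands as its first parameter (null here for none) ahead of the command, the log files, and the log length.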

public class TestEventFetcher {

  @Test
  public void testConsecutiveFetch() throws IOException {
    final int MAX_EVENTS_TO_FETCH = 100;
    TaskAttemptID tid = new TaskAttemptID("12345", 1, TaskType.REDUCE, 1, 1);

    TaskUmbilicalProtocol umbilical = mock(TaskUmbilicalProtocol.class);
    when(umbilical.getMapCompletionEvents(any(JobID.class),
        anyInt(), anyInt(), any(TaskAttemptID.class)))
      .thenReturn(getMockedCompletionEventsUpdate(0, 0));
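
The test leans on Mockito and JUnit static imports; roughly the header such a test file needs (a sketch, Mockito 1.x matcher paths assumed):

    import static org.mockito.Matchers.any;
    import static org.mockito.Matchers.anyInt;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import java.io.IOException;

    import org.apache.hadoop.mapred.JobID;
    import org.apache.hadoop.mapred.TaskAttemptID;
    import org.apache.hadoop.mapred.TaskUmbilicalProtocol;
    import org.apache.hadoop.mapreduce.TaskType;
    import org.junit.Test;

The getMockedCompletionEventsUpdate(...) helper stubbed into the mock above is the subject of the next excerpt.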

  // Helper referenced by the test above; the signature is reconstructed
  // from the call site getMockedCompletionEventsUpdate(0, 0).
  private MapTaskCompletionEventsUpdate getMockedCompletionEventsUpdate(
      int startIdx, int numEvents) {
    ArrayList<TaskCompletionEvent> tceList =
        new ArrayList<TaskCompletionEvent>(numEvents);
    for (int i = 0; i < numEvents; ++i) {
      int eventIdx = startIdx + i;
      // TaskCompletionEvent(eventId, attemptId, idWithinJob, isMap,
      //                     status, taskTrackerHttp)
      TaskCompletionEvent tce = new TaskCompletionEvent(eventIdx,
          new TaskAttemptID("12345", 1, TaskType.MAP, eventIdx, 0),
          eventIdx, true, TaskCompletionEvent.Status.SUCCEEDED,
          "http://somehost:8888");
      tceList.add(tce);
    }
    TaskCompletionEvent[] events = {};
    // Reconstructed tail of the truncated excerpt: wrap the list in an
    // update object with the reset flag cleared.
    return new MapTaskCompletionEventsUpdate(tceList.toArray(events), false);
  }
