
Examples of org.apache.hadoop.mapred.TaskAttemptID
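A quick orientation before the examples: a TaskAttemptID identifies one attempt of one task, and its string form is attempt_<jtIdentifier>_<jobNum>_<m|r>_<taskNum>_<attemptNum>. The sketch below is not taken from any example on this page; the ID string is illustrative, and the TaskType-based TaskID constructor assumes Hadoop 2.x.

  import org.apache.hadoop.mapred.TaskAttemptID;
  import org.apache.hadoop.mapred.TaskID;
  import org.apache.hadoop.mapreduce.TaskType;

  public class TaskAttemptIDDemo {
    public static void main(String[] args) {
      // Parse the canonical string form.
      TaskAttemptID parsed =
          TaskAttemptID.forName("attempt_200707121733_0003_m_000005_0");
      System.out.println(parsed.getJobID());  // job_200707121733_0003
      System.out.println(parsed.getTaskID()); // task_200707121733_0003_m_000005
      System.out.println(parsed.getId());     // 0

      // Build the same ID from its parts, as several examples below do.
      TaskAttemptID built = new TaskAttemptID(
          new TaskID("200707121733", 3, TaskType.MAP, 5), 0);
      System.out.println(built.equals(parsed)); // true
    }
  }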


  /**
   * Clean up the stdout and stderr log files left by a previous task attempt.
   */
  private void initStdOut(JobConf configuration) {
    TaskAttemptID taskId = TaskAttemptID.forName(configuration
            .get(MRJobConfig.TASK_ATTEMPT_ID));
    File stdOut = TaskLog.getTaskLogFile(taskId, false, TaskLog.LogName.STDOUT);
    File stdErr = TaskLog.getTaskLogFile(taskId, false, TaskLog.LogName.STDERR);
    // prepare folder
    if (!stdOut.getParentFile().exists()) {
      stdOut.getParentFile().mkdirs();
    } else { // clean logs (branch reconstructed; the listing was truncated here)
      stdOut.deleteOnExit();
      stdErr.deleteOnExit();
    }
  }
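
A hypothetical call site for this helper, showing where the attempt ID comes from (the ID string is illustrative; MRJobConfig.TASK_ATTEMPT_ID is the key "mapreduce.task.attempt.id"):

  JobConf conf = new JobConf();
  conf.set(MRJobConfig.TASK_ATTEMPT_ID, "attempt_200707121733_0003_m_000005_0");
  initStdOut(conf); // resolves the attempt's stdout/stderr files and marks stale ones for deletion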

  private String readStdOut(JobConf conf) throws Exception {
    TaskAttemptID taskId = TaskAttemptID.forName(conf
            .get(MRJobConfig.TASK_ATTEMPT_ID));
    File stdOut = TaskLog.getTaskLogFile(taskId, false, TaskLog.LogName.STDOUT);

    return readFile(stdOut);
  }

  // Validate the stderr task log of the given task type (validates the 1st
  // task of that type).
  void validateTaskStderr(StreamJob job, TaskType type)
      throws IOException {
    TaskAttemptID attemptId =
        new TaskAttemptID(new TaskID(job.jobId_, type, 0), 0);

    String log = MapReduceTestUtil.readTaskLog(TaskLog.LogName.STDERR,
        attemptId, false);

    // trim() is called on expectedStderr here because the method
    // MapReduceTestUtil.readTaskLog() returns a trimmed String (tail of the
    // comment and the assertion reconstructed; the listing was truncated here)
    assertTrue(log.equals(expectedStderr.trim()));
  }

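A note on the construction above: new TaskAttemptID(new TaskID(job.jobId_, type, 0), 0) names attempt 0 of task 0 of the given type, i.e. the first attempt of the first map or reduce task of the job. The trailing false here (and in the TaskLog.getTaskLogFile calls below) appears to be the isCleanup flag, marking the attempt as a regular task rather than a cleanup attempt.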

    cmd.add(executable);
    // wrap the command in a stdout/stderr capture
    // we are starting a map/reduce task of the pipes job; this is not a
    // cleanup attempt.
    TaskAttemptID taskid =
      TaskAttemptID.forName(conf.get(MRJobConfig.TASK_ATTEMPT_ID));
    File stdout = TaskLog.getTaskLogFile(taskid, false, TaskLog.LogName.STDOUT);
    File stderr = TaskLog.getTaskLogFile(taskid, false, TaskLog.LogName.STDERR);
    long logLength = TaskLog.getTaskLogLength(conf);
    cmd = TaskLog.captureOutAndError(null, cmd, stdout, stderr, logLength,
                                     false); // final argument reconstructed; the listing was truncated here

    List<String> cmd = new ArrayList<String>();
    String executable = DistributedCache.getLocalCacheFiles(conf)[0].toString();
    FileUtil.chmod(executable, "a+x");
    cmd.add(executable);
    // wrap the command in a stdout/stderr capture
    TaskAttemptID taskid = TaskAttemptID.forName(conf.get("mapred.task.id"));
    File stdout = TaskLog.getTaskLogFile(taskid, TaskLog.LogName.STDOUT);
    File stderr = TaskLog.getTaskLogFile(taskid, TaskLog.LogName.STDERR);
    long logLength = TaskLog.getTaskLogLength(conf);
    cmd = TaskLog.captureOutAndError(cmd, stdout, stderr, logLength);
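The two pipes excerpts above are the same logic across API generations: the older one reads the literal key "mapred.task.id" (since deprecated in favor of MRJobConfig.TASK_ATTEMPT_ID, i.e. "mapreduce.task.attempt.id") and uses the two-argument TaskLog.getTaskLogFile and four-argument captureOutAndError, while the newer one also threads through the isCleanup flag and a setup-command argument.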

    HiveApiOutputFormat.initProfile(conf, outputDesc, profile);

    HiveApiOutputFormat outputFormat = new HiveApiOutputFormat();
    outputFormat.setMyProfileId(profile);

    TaskAttemptID taskAttemptID = TaskAttemptID.forName(taskAttemptIdStr);
    TaskAttemptContext taskContext = new HackTaskAttemptContext(new JobConf(conf), taskAttemptID);
    JobContext jobContext = new HackJobContext(new JobConf(conf), taskAttemptID.getJobID());

    RecordWriterImpl recordWriter = outputFormat.getRecordWriter(taskContext);

    HiveApiOutputCommitter committer = outputFormat.getOutputCommitter(taskContext);
    committer.setupJob(jobContext);
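The excerpt stops after setupJob; a sketch of the assumed continuation, following the standard Hadoop OutputCommitter lifecycle (not the actual hive-io code):

  committer.setupTask(taskContext);
  // ... write records through recordWriter here; the write signature is
  // specific to hive-io's RecordWriterImpl and is omitted ...
  recordWriter.close(taskContext);
  if (committer.needsTaskCommit(taskContext)) {
    committer.commitTask(taskContext);
  }
  committer.commitJob(jobContext);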

    if (System.getProperty("hadoop.log.dir") == null
        && System.getenv("HADOOP_LOG_DIR") != null)
      System.setProperty("hadoop.log.dir", System.getenv("HADOOP_LOG_DIR"));

    // wrap the command in a stdout/stderr capture
    TaskAttemptID taskid = TaskAttemptID.forName(job.get("mapred.task.id"));
    File stdout = TaskLog.getTaskLogFile(taskid, false, TaskLog.LogName.STDOUT);
    File stderr = TaskLog.getTaskLogFile(taskid, false, TaskLog.LogName.STDERR);
    long logLength = TaskLog.getTaskLogLength(job);
    command = TaskLog.captureOutAndError(null, command, stdout, stderr, logLength, false);
    stdout.getParentFile().mkdirs();
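TaskLog resolves these log files under the hadoop.log.dir system property, which is why this excerpt seeds that property from the HADOOP_LOG_DIR environment variable before calling getTaskLogFile.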

  /**
   * Constructor (javadoc opening reconstructed; the listing was truncated
   * above this line)
   *
   * @param conf Configuration
   */
  public PerThread(Configuration conf) {
    this.conf = HiveUtils.newHiveConf(conf, OutputCmd.class);
    this.taskID = new TaskAttemptID("hiveio_output", 42, true,
        (int) Thread.currentThread().getId(), 0);
  }
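
The five-argument constructor used here is the older form TaskAttemptID(String jtIdentifier, int jobId, boolean isMap, int taskId, int id): "hiveio_output" and 42 stand in for a real JobTracker identifier and job number, and the thread ID keeps attempt IDs distinct across threads. Assuming Hadoop 2.x, a TaskType-based equivalent would be:

  // taskNum is the thread-derived task number from above (hypothetical variable)
  TaskAttemptID taskID = new TaskAttemptID(
      new TaskID("hiveio_output", 42, TaskType.MAP, taskNum), 0);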

