Package org.apache.hadoop.mapreduce

Examples of org.apache.hadoop.mapreduce.TaskAttemptContext


        if(reader != null){
            reader.close();
        }
        InputSplit curSplit = inpSplits.get(curSplitIndex);
        TaskAttemptContext tAContext = new TaskAttemptContext(conf,
                new TaskAttemptID());
        reader = inputFormat.createRecordReader(curSplit, tAContext);
        reader.initialize(curSplit, tAContext);
        // create a dummy pigsplit - other than the actual split, the other
        // params are really not needed here where we are just reading the
        // input
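
The snippet above builds a TaskAttemptContext only to satisfy the InputFormat API while reading splits outside of a running task. A minimal, self-contained sketch of the same pattern against Hadoop 2.x, where TaskAttemptContext is an interface and TaskAttemptContextImpl is the usual concrete class, could look like the following (the input path is hypothetical):

    import java.util.List;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.InputSplit;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.RecordReader;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.hadoop.mapreduce.TaskAttemptID;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
    import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

    public class SplitReaderSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            Job job = Job.getInstance(conf);
            // Hypothetical local input file, used only for illustration.
            FileInputFormat.addInputPath(job, new Path("/tmp/input.txt"));

            TextInputFormat inputFormat = new TextInputFormat();
            List<InputSplit> splits = inputFormat.getSplits(job);

            // A synthetic TaskAttemptContext is enough when reading splits
            // outside a real task; the attempt id carries no meaning here.
            TaskAttemptContext taContext = new TaskAttemptContextImpl(
                    job.getConfiguration(), new TaskAttemptID());

            for (InputSplit split : splits) {
                RecordReader<LongWritable, Text> reader =
                        inputFormat.createRecordReader(split, taContext);
                reader.initialize(split, taContext);
                while (reader.nextKeyValue()) {
                    System.out.println(reader.getCurrentValue());
                }
                reader.close();
            }
        }
    }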


      
        // make a copy of the Context to use here - since in the same
        // task (map or reduce) we could have multiple stores, we should
        // make this copy so that the same context does not get over-written
        // by the different stores.
        this.context = new TaskAttemptContext(outputConf,
                context.getTaskAttemptID());
    }
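
The copy above matters because a TaskAttemptContext wraps a live Configuration, so two stores writing into the same Configuration would clobber each other's settings. A small demonstration of the idea on the Hadoop 2.x API (the property name is made up for the demo):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.hadoop.mapreduce.TaskAttemptID;
    import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

    public class ContextCopyDemo {
        public static void main(String[] args) {
            TaskAttemptContext original = new TaskAttemptContextImpl(
                    new Configuration(), new TaskAttemptID());

            // Each store gets its own copy of the underlying Configuration,
            // so writes to one copy are invisible to the others.
            TaskAttemptContext storeA = copyOf(original);
            TaskAttemptContext storeB = copyOf(original);
            storeA.getConfiguration().set("example.output.marker", "store-a");
            storeB.getConfiguration().set("example.output.marker", "store-b");

            System.out.println(storeA.getConfiguration().get("example.output.marker"));   // store-a
            System.out.println(storeB.getConfiguration().get("example.output.marker"));   // store-b
            System.out.println(original.getConfiguration().get("example.output.marker")); // null
        }

        private static TaskAttemptContext copyOf(TaskAttemptContext context) {
            Configuration confCopy = new Configuration(context.getConfiguration());
            return new TaskAttemptContextImpl(confCopy, context.getTaskAttemptID());
        }
    }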

        List<Pair<OutputCommitter, POStore>> committers =
            new ArrayList<Pair<OutputCommitter,POStore>>();
        for (POStore store : stores) {
            StoreFuncInterface sFunc = store.getStoreFunc();
           
            TaskAttemptContext updatedContext = setUpContext(context, store);
            try {
                committers.add(new Pair<OutputCommitter, POStore>(
                        sFunc.getOutputFormat().getOutputCommitter(
                                updatedContext), store));
            } catch (InterruptedException e) {
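
Stripped of the Pig-specific types, the loop above is simply asking each OutputFormat for its OutputCommitter, which requires a TaskAttemptContext. A bare-bones sketch with a stock TextOutputFormat (the output directory is an assumption):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.NullWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.OutputCommitter;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.hadoop.mapreduce.TaskAttemptID;
    import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
    import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

    public class CommitterSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // Hypothetical output directory; this is the Hadoop 2.x property
            // name ("mapred.output.dir" on older releases).
            conf.set("mapreduce.output.fileoutputformat.outputdir", "/tmp/out");

            TaskAttemptContext context = new TaskAttemptContextImpl(conf,
                    TaskAttemptID.forName("attempt_200707121733_0001_m_000000_0"));

            TextOutputFormat<Text, NullWritable> outputFormat =
                    new TextOutputFormat<Text, NullWritable>();
            // FileOutputFormat-based formats return a FileOutputCommitter
            // bound to the output path carried by the context's Configuration.
            OutputCommitter committer = outputFormat.getOutputCommitter(context);
            System.out.println(committer.getClass().getName());
        }
    }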

    private TaskAttemptContext setUpContext(TaskAttemptContext context,
            POStore store) throws IOException {
        // Setup UDFContext so StoreFunc can make use of it
        MapRedUtil.setupUDFContext(context.getConfiguration());
        // make a copy of the context so that the actions after this call
        // do not end up updating the same context
        TaskAttemptContext contextCopy = new TaskAttemptContext(
                context.getConfiguration(), context.getTaskAttemptID());
       
        // call setLocation() on the storeFunc so that if there are any
        // side effects like setting map.output.dir on the Configuration
        // in the Context that are needed by the OutputCommitter, those
        // actions happen before the committer is created
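
The intent of the copy-then-setLocation sequence is that every store gets its own Configuration to mutate (its own output directory, for instance) before its OutputCommitter is obtained. A generic, Pig-free sketch of that idea (the method name and the per-store directory are hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

    public final class PerStoreContexts {
        // Sketch: build a per-store TaskAttemptContext whose Configuration
        // carries that store's output directory, leaving the original
        // context's Configuration untouched.
        public static TaskAttemptContext forOutputDir(TaskAttemptContext context,
                String outputDir) {
            Configuration confCopy = new Configuration(context.getConfiguration());
            // Hadoop 2.x property name; older releases used "mapred.output.dir".
            confCopy.set("mapreduce.output.fileoutputformat.outputdir", outputDir);
            return new TaskAttemptContextImpl(confCopy, context.getTaskAttemptID());
        }
    }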

    @Override
    public void abortTask(TaskAttemptContext context) throws IOException {       
        if(context.getTaskAttemptID().isMap()) {
            for (Pair<OutputCommitter, POStore> mapCommitter :
                mapOutputCommitters) {
                TaskAttemptContext updatedContext = setUpContext(context,
                        mapCommitter.second);
                mapCommitter.first.abortTask(updatedContext);
            }
        } else {
            for (Pair<OutputCommitter, POStore> reduceCommitter :
                reduceOutputCommitters) {
                TaskAttemptContext updatedContext = setUpContext(context,
                        reduceCommitter.second);
                reduceCommitter.first.abortTask(updatedContext);
            }
        }
    }

    @Override
    public void commitTask(TaskAttemptContext context) throws IOException {
        if(context.getTaskAttemptID().isMap()) {
            for (Pair<OutputCommitter, POStore> mapCommitter :
                mapOutputCommitters) {
                TaskAttemptContext updatedContext = setUpContext(context,
                        mapCommitter.second);
                mapCommitter.first.commitTask(updatedContext);
            }
        } else {
            for (Pair<OutputCommitter, POStore> reduceCommitter :
                reduceOutputCommitters) {
                TaskAttemptContext updatedContext = setUpContext(context,
                        reduceCommitter.second);
                reduceCommitter.first.commitTask(updatedContext);
            }
        }
    }

    @Override
    public boolean needsTaskCommit(TaskAttemptContext context)
            throws IOException {
        boolean needCommit = false;
        if(context.getTaskAttemptID().isMap()) {
            for (Pair<OutputCommitter, POStore> mapCommitter :
                mapOutputCommitters) {
                TaskAttemptContext updatedContext = setUpContext(context,
                        mapCommitter.second);
                needCommit = needCommit ||
                        mapCommitter.first.needsTaskCommit(updatedContext);
            }
            return needCommit;
        } else {
            for (Pair<OutputCommitter, POStore> reduceCommitter :
                reduceOutputCommitters) {
                TaskAttemptContext updatedContext = setUpContext(context,
                        reduceCommitter.second);
                needCommit = needCommit ||
                        reduceCommitter.first.needsTaskCommit(updatedContext);
            }
            return needCommit;
        }
    }
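
needsTaskCommit, commitTask, and abortTask are the task-side half of the output commit protocol, and all three are driven with the task attempt's TaskAttemptContext. A minimal sketch of that call sequence against a stock FileOutputCommitter (the output directory is illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.OutputCommitter;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.hadoop.mapreduce.TaskAttemptID;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
    import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

    public class CommitProtocolSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            TaskAttemptContext context = new TaskAttemptContextImpl(conf,
                    TaskAttemptID.forName("attempt_200707121733_0001_m_000000_0"));
            OutputCommitter committer = new FileOutputCommitter(
                    new Path("/tmp/commit-sketch"), context);

            committer.setupTask(context);
            // ... the task would write its output through a RecordWriter here ...
            if (committer.needsTaskCommit(context)) {
                // Promote this attempt's output into the job's output directory.
                committer.commitTask(context);
            }
            // A failed or killed attempt would call committer.abortTask(context)
            // instead, discarding whatever that attempt wrote.
        }
    }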

    @Override
    public void setupTask(TaskAttemptContext context) throws IOException {
        if(context.getTaskAttemptID().isMap()) {
            for (Pair<OutputCommitter, POStore> mapCommitter :
                mapOutputCommitters) {
                TaskAttemptContext updatedContext = setUpContext(context,
                        mapCommitter.second);
                mapCommitter.first.setupTask(updatedContext);
            }
        } else {
            for (Pair<OutputCommitter, POStore> reduceCommitter :
                reduceOutputCommitters) {
                TaskAttemptContext updatedContext = setUpContext(context,
                        reduceCommitter.second);
                reduceCommitter.first.setupTask(updatedContext);
            }
        }
    }
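
All four overrides above follow the same shape: pick the committer list for the map or reduce side of the task, rebuild a per-store TaskAttemptContext, and delegate. A compressed, Pig-free sketch of that composite-committer pattern (the per-store context rebuild is omitted and a plain List replaces the Pair/POStore bookkeeping) might look like this:

    import java.io.IOException;
    import java.util.List;

    import org.apache.hadoop.mapreduce.JobContext;
    import org.apache.hadoop.mapreduce.OutputCommitter;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;

    // Sketch: an OutputCommitter that fans each task-level call out to the
    // committers of several underlying outputs (map-side or reduce-side).
    public class CompositeOutputCommitter extends OutputCommitter {

        private final List<OutputCommitter> mapCommitters;
        private final List<OutputCommitter> reduceCommitters;

        public CompositeOutputCommitter(List<OutputCommitter> mapCommitters,
                List<OutputCommitter> reduceCommitters) {
            this.mapCommitters = mapCommitters;
            this.reduceCommitters = reduceCommitters;
        }

        private List<OutputCommitter> select(TaskAttemptContext context) {
            // Same map/reduce check as in the snippets above.
            return context.getTaskAttemptID().isMap()
                    ? mapCommitters : reduceCommitters;
        }

        @Override
        public void setupJob(JobContext context) throws IOException {
            // Job-level setup would delegate similarly; omitted in this sketch.
        }

        @Override
        public void setupTask(TaskAttemptContext context) throws IOException {
            for (OutputCommitter committer : select(context)) {
                committer.setupTask(context);
            }
        }

        @Override
        public boolean needsTaskCommit(TaskAttemptContext context)
                throws IOException {
            boolean needCommit = false;
            for (OutputCommitter committer : select(context)) {
                needCommit = needCommit || committer.needsTaskCommit(context);
            }
            return needCommit;
        }

        @Override
        public void commitTask(TaskAttemptContext context) throws IOException {
            for (OutputCommitter committer : select(context)) {
                committer.commitTask(context);
            }
        }

        @Override
        public void abortTask(TaskAttemptContext context) throws IOException {
            for (OutputCommitter committer : select(context)) {
                committer.abortTask(context);
            }
        }
    }

The remaining snippets come from test code that needs a TaskAttemptContext outside of a running job.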

  @Test
  public void test_LATEST_TIMESTAMP_isReplaced()
  throws Exception {
    Configuration conf = new Configuration(this.util.getConfiguration());
    RecordWriter<ImmutableBytesWritable, KeyValue> writer = null;
    TaskAttemptContext context = null;
    Path dir =
      util.getDataTestDir("test_LATEST_TIMESTAMP_isReplaced");
    try {
      Job job = new Job(conf);
      FileOutputFormat.setOutputPath(job, dir);
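
Tests like the one above need a RecordWriter outside of a running task, which in turn needs a TaskAttemptContext. A minimal, HBase-free sketch of that setup with a plain TextOutputFormat (the paths and the attempt id string are illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.NullWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.RecordWriter;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.hadoop.mapreduce.TaskAttemptID;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
    import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

    public class RecordWriterTestSketch {
        public static void main(String[] args) throws Exception {
            Job job = Job.getInstance(new Configuration());
            // Illustrative local output directory for the test.
            FileOutputFormat.setOutputPath(job, new Path("/tmp/record-writer-test"));

            // A synthetic attempt id; the format only uses it to derive the
            // attempt's temporary output file name.
            TaskAttemptContext context = new TaskAttemptContextImpl(
                    job.getConfiguration(),
                    TaskAttemptID.forName("attempt_200707121733_0001_m_000000_0"));

            TextOutputFormat<Text, NullWritable> outputFormat =
                    new TextOutputFormat<Text, NullWritable>();
            RecordWriter<Text, NullWritable> writer =
                    outputFormat.getRecordWriter(context);
            try {
                writer.write(new Text("hello"), NullWritable.get());
            } finally {
                writer.close(context);
            }
        }
    }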

  private boolean isPost020MapReduce() {
    return TaskAttemptContext.class.isInterface();
  }

  private TaskAttemptContext getTestTaskAttemptContext(final Job job)
  throws IOException, Exception {
    TaskAttemptContext context;
    if (isPost020MapReduce()) {
      TaskAttemptID id =
        TaskAttemptID.forName("attempt_200707121733_0001_m_000000_0");
      Class<?> clazz =
        Class.forName("org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl");
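
The reflection above exists because the concrete type behind TaskAttemptContext moved: in Hadoop 0.20.x it was a class in org.apache.hadoop.mapreduce, while from 0.21 onward (including 2.x) it is an interface implemented by org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl. A sketch of a version-agnostic factory along the same lines (both classes expose a (Configuration, TaskAttemptID) constructor):

    import java.lang.reflect.Constructor;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.hadoop.mapreduce.TaskAttemptID;

    public final class TaskAttemptContexts {
        // Sketch: build a TaskAttemptContext on either side of the 0.21 API
        // split by reflecting on whichever concrete class is on the classpath.
        public static TaskAttemptContext create(Configuration conf,
                TaskAttemptID id) throws Exception {
            String className = TaskAttemptContext.class.isInterface()
                    // Hadoop 0.21+ / 2.x: the interface's standard implementation.
                    ? "org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl"
                    // Hadoop 0.20.x: TaskAttemptContext itself is the class.
                    : "org.apache.hadoop.mapreduce.TaskAttemptContext";
            Class<?> clazz = Class.forName(className);
            Constructor<?> ctor =
                    clazz.getConstructor(Configuration.class, TaskAttemptID.class);
            return (TaskAttemptContext) ctor.newInstance(conf, id);
        }
    }

A TaskAttemptID obtained from TaskAttemptID.forName("attempt_200707121733_0001_m_000000_0"), as in the snippet above, is a convenient input for such a factory when no real task is running.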
