Examples of TaskAttemptContext


Examples of org.apache.hadoop.mapreduce.TaskAttemptContext

        POStore store = new POStore(new OperatorKey());
        store.setSFile(new FileSpec(file, storeFuncSpec));
        PigOutputFormat.setLocation(jc, store);
        OutputCommitter oc;
        // create a simulated TaskAttemptContext
        TaskAttemptContext tac = new TaskAttemptContext(conf, new TaskAttemptID());
        PigOutputFormat.setLocation(tac, store);
        RecordWriter<?, ?> rw;
        try {
            of.checkOutputSpecs(jc);
            oc = of.getOutputCommitter(tac);
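The fragment above builds a simulated TaskAttemptContext so that an OutputFormat's checkOutputSpecs() and getOutputCommitter() can be exercised outside a running task. The (Configuration, TaskAttemptID) constructor used here exists on Hadoop 0.20/1.x, where TaskAttemptContext is a concrete class; on Hadoop 2.x and later it is an interface, and a rough equivalent of the same setup (a sketch only, with TextOutputFormat standing in for PigOutputFormat) looks like this:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

// Sketch (Hadoop 2.x API): create a stand-alone attempt context and ask an
// OutputFormat for its committer, mirroring the snippet above.
static OutputCommitter simulatedCommitter(Configuration conf)
        throws IOException, InterruptedException {
    // example output directory; any writable path would do
    conf.set("mapreduce.output.fileoutputformat.outputdir", "/tmp/simulated-out");
    TaskAttemptContext tac = new TaskAttemptContextImpl(conf, new TaskAttemptID());
    OutputFormat<Object, Object> of = new TextOutputFormat<Object, Object>();
    return of.getOutputCommitter(tac);
}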

Examples of org.apache.hadoop.mapreduce.TaskAttemptContext

        }
        if(reader != null){
            reader.close();
        }
        InputSplit curSplit = inpSplits.get(curSplitIndex);
        TaskAttemptContext tAContext = new TaskAttemptContext(conf,
                new TaskAttemptID());
        reader = inputFormat.createRecordReader(curSplit, tAContext);
        reader.initialize(curSplit, tAContext);
        // create a dummy pigsplit - other than the actual split, the other
        // params are really not needed here where we are just reading the
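The snippet above creates a RecordReader for the current split against a dummy TaskAttemptContext and initializes it. What normally follows (not shown in the truncated fragment) is the standard mapreduce read loop, sketched here under the assumption that reader is the RecordReader created above:

        // Sketch: drive the reader once initialize() has been called. The key
        // and value types are whatever the wrapped InputFormat declares.
        while (reader.nextKeyValue()) {
            Object key = reader.getCurrentKey();
            Object value = reader.getCurrentValue();
            // consume the record here
        }
        reader.close();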

Examples of org.apache.hadoop.mapreduce.TaskAttemptContext

      
        // make a copy of the Context to use here - since in the same
        // task (map or reduce) we could have multiple stores, we should
        // make this copy so that the same context does not get over-written
        // by the different stores.
        this.context = new TaskAttemptContext(outputConf,
                context.getTaskAttemptID());
    }

Examples of org.apache.hadoop.mapreduce.TaskAttemptContext

        List<Pair<OutputCommitter, POStore>> committers =
            new ArrayList<Pair<OutputCommitter,POStore>>();
        for (POStore store : stores) {
            StoreFuncInterface sFunc = store.getStoreFunc();
           
            TaskAttemptContext updatedContext = setUpContext(context, store);
            try {
                committers.add(new Pair<OutputCommitter, POStore>(
                        sFunc.getOutputFormat().getOutputCommitter(
                                updatedContext), store));
            } catch (InterruptedException e) {

Examples of org.apache.hadoop.mapreduce.TaskAttemptContext

            POStore store) throws IOException {
        // Setup UDFContext so StoreFunc can make use of it
        MapRedUtil.setupUDFContext(context.getConfiguration());
        // make a copy of the context so that the actions after this call
        // do not end up updating the same context
        TaskAttemptContext contextCopy = new TaskAttemptContext(
                context.getConfiguration(), context.getTaskAttemptID());
       
        // call setLocation() on the storeFunc so that if there are any
        // side effects like setting map.output.dir on the Configuration
        // in the Context are needed by the OutputCommitter, those actions
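The copy made in this setUpContext() fragment is the per-store context that the committer methods below receive. A plausible completion of the method (a sketch, reusing the PigOutputFormat.setLocation() helper shown in the first example rather than the project's actual code) would be:

        // Sketch: apply the store's location to the copy so any Configuration
        // side effects land there, then return the copy to the caller.
        PigOutputFormat.setLocation(contextCopy, store);
        return contextCopy;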

Examples of org.apache.hadoop.mapreduce.TaskAttemptContext

    @Override
    public void abortTask(TaskAttemptContext context) throws IOException {       
        if(context.getTaskAttemptID().isMap()) {
            for (Pair<OutputCommitter, POStore> mapCommitter :
                mapOutputCommitters) {
                TaskAttemptContext updatedContext = setUpContext(context,
                        mapCommitter.second);
                mapCommitter.first.abortTask(updatedContext);
            }
        } else {
            for (Pair<OutputCommitter, POStore> reduceCommitter :
                reduceOutputCommitters) {
                TaskAttemptContext updatedContext = setUpContext(context,
                        reduceCommitter.second);
                reduceCommitter.first.abortTask(updatedContext);
            }
        }
    }

Examples of org.apache.hadoop.mapreduce.TaskAttemptContext

    @Override
    public void commitTask(TaskAttemptContext context) throws IOException {
        if(context.getTaskAttemptID().isMap()) {
            for (Pair<OutputCommitter, POStore> mapCommitter :
                mapOutputCommitters) {
                TaskAttemptContext updatedContext = setUpContext(context,
                        mapCommitter.second);
                mapCommitter.first.commitTask(updatedContext);
            }
        } else {
            for (Pair<OutputCommitter, POStore> reduceCommitter :
                reduceOutputCommitters) {
                TaskAttemptContext updatedContext = setUpContext(context,
                        reduceCommitter.second);
                reduceCommitter.first.commitTask(updatedContext);
            }
        }
    }

Examples of org.apache.hadoop.mapreduce.TaskAttemptContext

            throws IOException {
        boolean needCommit = false;
        if(context.getTaskAttemptID().isMap()) {
            for (Pair<OutputCommitter, POStore> mapCommitter :
                mapOutputCommitters) {
                TaskAttemptContext updatedContext = setUpContext(context,
                        mapCommitter.second);
                needCommit = needCommit ||
                        mapCommitter.first.needsTaskCommit(updatedContext);
            }
            return needCommit;
        } else {
            for (Pair<OutputCommitter, POStore> reduceCommitter :
                reduceOutputCommitters) {
                TaskAttemptContext updatedContext = setUpContext(context,
                        reduceCommitter.second);
                needCommit = needCommit ||
                        reduceCommitter.first.needsTaskCommit(updatedContext);
            }
            return needCommit;

Examples of org.apache.hadoop.mapreduce.TaskAttemptContext

    @Override
    public void setupTask(TaskAttemptContext context) throws IOException {
        if(context.getTaskAttemptID().isMap()) {
            for (Pair<OutputCommitter, POStore> mapCommitter :
                mapOutputCommitters) {
                TaskAttemptContext updatedContext = setUpContext(context,
                        mapCommitter.second);
                mapCommitter.first.setupTask(updatedContext);
            }
        } else {
            for (Pair<OutputCommitter, POStore> reduceCommitter :
                reduceOutputCommitters) {
                TaskAttemptContext updatedContext = setUpContext(context,
                        reduceCommitter.second);
                reduceCommitter.first.setupTask(updatedContext);
            }
        }
    }

Examples of org.apache.hadoop.mapreduce.TaskAttemptContext

  @Test
  public void test_LATEST_TIMESTAMP_isReplaced()
  throws Exception {
    Configuration conf = new Configuration(this.util.getConfiguration());
    RecordWriter<ImmutableBytesWritable, KeyValue> writer = null;
    TaskAttemptContext context = null;
    Path dir =
      util.getDataTestDir("test_LATEST_TIMESTAMP_isReplaced");
    try {
      Job job = new Job(conf);
      FileOutputFormat.setOutputPath(job, dir);
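The HBase test above is cut off right after the output path is configured. A plausible continuation (a sketch, not the test's actual code; HFileOutputFormat is assumed from the ImmutableBytesWritable/KeyValue writer type) builds a TaskAttemptContext for the job and asks the format for a writer:

      // Sketch: simulate a task attempt for the job and obtain a writer from
      // the output format under test (HFileOutputFormat is an assumption here).
      context = new TaskAttemptContext(job.getConfiguration(), new TaskAttemptID());
      writer = new HFileOutputFormat().getRecordWriter(context);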