Class org.apache.hadoop.mapred.JobClient

Examples of org.apache.hadoop.mapred.JobClient.RawSplit
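
RawSplit is a small Writable container used by the old-API JobClient when it writes out a job's input splits: for each split it records the split's class name, its logical length, the split serialized to raw bytes, and the hosts that store the data. The snippets collected below show that conversion in context. For orientation, here is a minimal standalone sketch of the same idea; it assumes the 0.20-era mapred API in which RawSplit is a nested class of JobClient (hence the org.apache.hadoop.mapred package declaration), and the class name RawSplitExample, the file path and the host names are illustrative only.

    package org.apache.hadoop.mapred;

    import java.io.IOException;

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.DataOutputBuffer;
    import org.apache.hadoop.mapred.JobClient.RawSplit;

    public class RawSplitExample {

      // Wrap an old-API FileSplit in a RawSplit: serialize the split into a
      // byte buffer and record its class name, length, bytes and locations.
      static RawSplit toRawSplit(FileSplit split) throws IOException {
        DataOutputBuffer buffer = new DataOutputBuffer();
        split.write(buffer);                       // FileSplit is a Writable

        RawSplit rawSplit = new RawSplit();
        rawSplit.setClassName(split.getClass().getName());
        rawSplit.setDataLength(split.getLength());
        rawSplit.setBytes(buffer.getData(), 0, buffer.getLength());
        rawSplit.setLocations(split.getLocations());
        return rawSplit;
      }

      public static void main(String[] args) throws IOException {
        // Illustrative values: a 1 KB slice of a file stored on two hosts.
        FileSplit split = new FileSplit(new Path("/tmp/input/part-0"), 0L, 1024L,
                                        new String[] { "host1", "host2" });
        RawSplit rawSplit = toRawSplit(split);
        System.out.println(rawSplit.getClassName() + ": "
            + rawSplit.getDataLength() + " bytes, "
            + rawSplit.getLocations().length + " locations");
      }
    }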


          Serializer serializer =
            factory.getSerializer(splits.get(0).getClass());
          serializer.open(buffer);
          // Serialize each split with the Serializer obtained from the
          // serialization factory, then record its class name, length,
          // bytes and locations in a RawSplit.
          for (int i = 0; i < splits.size(); i++) {
            buffer.reset();
            serializer.serialize(splits.get(i));
            RawSplit rawSplit = new RawSplit();
            rawSplit.setClassName(splits.get(i).getClass().getName());
            rawSplit.setDataLength(splits.get(i).getLength());
            rawSplit.setBytes(buffer.getData(), 0, buffer.getLength());
            rawSplit.setLocations(splits.get(i).getLocations());
            rawSplits[i] = rawSplit;
          }

        } else {
          // Splits produced by the old mapred InputFormat are Writables, so
          // each one is written straight into the buffer before being
          // wrapped in a RawSplit.
          InputSplit[] splits = job.getInputFormat().getSplits(job, 1);
          rawSplits = new RawSplit[splits.length];
          DataOutputBuffer buffer = new DataOutputBuffer();
          for (int i = 0; i < splits.length; i++) {
            buffer.reset();
            splits[i].write(buffer);
            RawSplit rawSplit = new RawSplit();
            rawSplit.setClassName(splits[i].getClass().getName());
            rawSplit.setDataLength(splits[i].getLength());
            rawSplit.setBytes(buffer.getData(), 0, buffer.getLength());
            rawSplit.setLocations(splits[i].getLocations());
            rawSplits[i] = rawSplit;
          }
        }
       
        int numReduceTasks = job.getNumReduceTasks();
View Full Code Here
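
Reading a RawSplit back is the mirror image: instantiate the recorded split class reflectively and let it deserialize itself from the stored bytes. The sketch below makes the same assumptions as above; the method name fromRawSplit is illustrative, conf is a JobConf, the wrapped split is an old-API Writable InputSplit as in the else-branch of the snippet above, and ReflectionUtils, DataInputBuffer and BytesWritable come from org.apache.hadoop.util and org.apache.hadoop.io.

      static InputSplit fromRawSplit(JobClient.RawSplit rawSplit, JobConf conf)
          throws IOException {
        try {
          // Resolve and instantiate the split class recorded in the RawSplit.
          Class<? extends InputSplit> splitClass = conf
              .getClassByName(rawSplit.getClassName()).asSubclass(InputSplit.class);
          InputSplit split = ReflectionUtils.newInstance(splitClass, conf);

          // Feed the stored bytes back through a DataInputBuffer so the split
          // can deserialize itself. (Newer BytesWritable versions name these
          // accessors getBytes()/getLength() instead of get()/getSize().)
          BytesWritable bytes = rawSplit.getBytes();
          DataInputBuffer in = new DataInputBuffer();
          in.reset(bytes.get(), 0, bytes.getSize());
          split.readFields(in);
          return split;
        } catch (ClassNotFoundException cnfe) {
          throw new IOException("Split class "
              + rawSplit.getClassName() + " not found", cnfe);
        }
      }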


      System.out.println(estOutSize);
      assertEquals(0, estOutSize);
     
      TaskStatus ts = new MapTaskStatus();
      ts.setOutputSize(singleMapOutputSize);
      RawSplit split = new RawSplit();
      split.setDataLength(0);
      TaskInProgress tip = new TaskInProgress(jid, "", split, null, jc, jip, 0);
      re.updateWithCompletedTask(ts, tip);
    }
    assertEquals(2 * singleMapOutputSize, re.getEstimatedMapOutputSize());
    assertEquals(2 * singleMapOutputSize * maps / reduces, re.getEstimatedReduceInputSize());
View Full Code Here

      System.out.println(estOutSize);
      assertEquals(0, estOutSize);
     
      TaskStatus ts = new MapTaskStatus();
      ts.setOutputSize(singleMapOutputSize);
      RawSplit split = new RawSplit();
      split.setDataLength(singleMapInputSize);
      TaskInProgress tip = new TaskInProgress(jid, "", split, null, jc, jip, 0);
      re.updateWithCompletedTask(ts, tip);
    }
   
    assertEquals(2 * singleMapOutputSize, re.getEstimatedMapOutputSize());
    assertEquals(2 * singleMapOutputSize * maps / reduces, re.getEstimatedReduceInputSize());

    // Add one more completed map task whose input size is 0.
    TaskStatus ts = new MapTaskStatus();
    ts.setOutputSize(singleMapOutputSize);
    RawSplit split = new RawSplit();
    split.setDataLength(0);
    TaskInProgress tip = new TaskInProgress(jid, "", split, null, jc, jip, 0);
    re.updateWithCompletedTask(ts, tip);

    long expectedTotalMapOutSize = (singleMapOutputSize * 11)
        * ((maps * singleMapInputSize) + maps) / ((singleMapInputSize + 1) * 10 + 1);
 
View Full Code Here


    if (splits == null || splits.length != numMapTasks) {
      throw new IllegalArgumentException("Input split size mismatch: expected="
          + numMapTasks + ", actual=" + ((splits == null) ? -1 : splits.length));
    }

    // Metadata-only conversion: record class name, length and locations,
    // but do not copy the serialized split bytes.
    RawSplit[] rawSplits = new RawSplit[splits.length];
    for (int i = 0; i < splits.length; i++) {
      try {
        rawSplits[i] = new RawSplit();
        rawSplits[i].setClassName(splits[i].getClass().getName());
        rawSplits[i].setDataLength(splits[i].getLength());
        rawSplits[i].setLocations(splits[i].getLocations());
      } catch (InterruptedException ie) {
        throw new IOException(ie);
View Full Code Here

    JobID jobid = (JobID) taskAttemptID.getJobID();
    assert (jobid == getJobID());

    // Get the input split for this task attempt
    RawSplit split = splits[taskAttemptID.getTaskID().getId()];
    int locality = getClosestLocality(taskTracker, split);

    TaskID taskId = taskAttemptID.getTaskID();
    if (!taskId.isMap()) {
      assert false : "Task " + taskId + " is not a MAP task";
View Full Code Here

    }

    // Build one placeholder RawSplit per map task, with no preferred locations.
    RawSplit[] createSplits() {
      RawSplit[] splits = new RawSplit[numMapTasks];
      for (int i = 0; i < numMapTasks; i++) {
        splits[i] = new RawSplit();
        splits[i].setLocations(new String[0]);
      }
      return splits;
    }
View Full Code Here

      System.out.println(estOutSize);
      assertEquals(0, estOutSize);
     
      TaskStatus ts = new MapTaskStatus();
      ts.setOutputSize(singleMapOutputSize);
      RawSplit split = new RawSplit();
      split.setDataLength(0);
      TaskInProgress tip =
        new TaskInProgress(jid, "", split, jc, jip, 0, 1);
      re.updateWithCompletedTask(ts, tip);
    }
    assertEquals(2 * singleMapOutputSize, re.getEstimatedMapOutputSize());
View Full Code Here
