Package org.apache.hadoop.mapred

Examples of org.apache.hadoop.mapred.InputSplit
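In the old mapred API, InputSplit is an interface that extends Writable and exposes getLength() and getLocations(). For orientation, here is a minimal sketch of a custom implementation; the class name RangeSplit and its fields are illustrative only and do not appear in the snippets below.

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.mapred.InputSplit;

// Illustrative split describing a numeric range; not part of the examples below.
public class RangeSplit implements InputSplit {
  private long start;
  private long end;

  public RangeSplit() { }  // no-arg constructor required for deserialization

  public RangeSplit(long start, long end) {
    this.start = start;
    this.end = end;
  }

  @Override
  public long getLength() throws IOException {
    return end - start;            // size hint used by the scheduler
  }

  @Override
  public String[] getLocations() throws IOException {
    return new String[0];          // no locality preference
  }

  @Override
  public void write(DataOutput out) throws IOException {
    out.writeLong(start);
    out.writeLong(end);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    start = in.readLong();
    end = in.readLong();
  }
}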


  // Expected-exception rule supplied by MRUnit; declared by the test class
  // but not triggered in this test.
  @Rule
  public final ExpectedSuppliedException thrown = ExpectedSuppliedException
      .none();

  @Test
  public void testGetInputSplitForMapper() {
    // A Reporter built for the mapper side should expose a non-null InputSplit.
    final InputSplit split = new MockReporter(MockReporter.ReporterType.Mapper,
        null).getInputSplit();
    assertTrue(null != split);
  }
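The test above exercises MRUnit's MockReporter; in a real job the same call is available on the Reporter passed to map(). Below is a minimal sketch of a mapper that reads its own split this way; SplitAwareMapper and the key/value types are illustrative, not taken from the example.

import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

// Illustrative mapper: asks the Reporter for its InputSplit and tags each
// record with the file it came from (only possible when the split is a FileSplit).
public class SplitAwareMapper extends MapReduceBase
    implements Mapper<LongWritable, Text, Text, Text> {

  public void map(LongWritable key, Text value,
      OutputCollector<Text, Text> output, Reporter reporter) throws IOException {
    InputSplit split = reporter.getInputSplit();
    String source = (split instanceof FileSplit)
        ? ((FileSplit) split).getPath().toString()
        : split.toString();
    output.collect(new Text(source), value);
  }
}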


      long seed = r.nextLong();
      r.setSeed(seed);
      LOG.debug("seed: " + seed);
      // shuffle splits
      for (int i = 0; i < splits.length; ++i) {
        InputSplit tmp = splits[i];
        int j = r.nextInt(splits.length);
        splits[i] = splits[j];
        splits[j] = tmp;
      }
      // our target rate is in terms of the maximum number of sample splits,
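The loop above (repeated in the next snippet) randomizes the order of the splits handed back by an InputFormat before a subset of them is sampled. As a sketch, the same shuffle can be written with Collections.shuffle over a list view of the array; SplitShuffler is an illustrative helper and not part of the original code.

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import org.apache.hadoop.mapred.InputSplit;

// Illustrative helper: shuffle splits in place with a seeded Random so the
// sampling order is reproducible when the seed is logged.
public final class SplitShuffler {
  private SplitShuffler() { }

  public static void shuffle(InputSplit[] splits, long seed) {
    List<InputSplit> view = Arrays.asList(splits);   // fixed-size list backed by the array
    Collections.shuffle(view, new Random(seed));     // swaps write through to splits
  }
}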

    proportion = 5L * proportion;

    // shuffle splits
    for (int i = 0; i < splits.length; ++i)
    {
      InputSplit tmp = splits[i];
      int j = r.nextInt(splits.length);
      splits[i] = splits[j];
      splits[j] = tmp;
    }


  // Returns the mapper class that can process the given input split.
  public Class<AbstractMobiusMapper> getMapper(InputSplit split, JobConf conf)
    throws IOException
  {
    TaggedInputSplit taggedSplit = (TaggedInputSplit) split;
    InputSplit inputSplit = taggedSplit.getInputSplit();
    URI currentFileURI = MultiInputsHelpersRepository.getInstance(conf).getURIBySplit(inputSplit, conf);

    try
    {
      String[] pathToMapperMappings = conf.get("mapred.input.dir.mappers").split(",");
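The mapred.input.dir.mappers property read above matches the property that the old-API MultipleInputs helper fills with one path;mapperClass entry per registered input. A minimal sketch of registering two inputs that way follows; the paths are placeholders, and IdentityMapper/TokenCountMapper merely stand in for job-specific mapper classes.

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.MultipleInputs;
import org.apache.hadoop.mapred.lib.TokenCountMapper;

public class MultiInputJobSetup {
  // Pair each input directory with its own mapper class; at run time each
  // map task receives a TaggedInputSplit, from which the matching mapper can
  // be resolved, as in getMapper() above. The paths here are placeholders.
  public static void configureInputs(JobConf conf) {
    MultipleInputs.addInputPath(conf, new Path("/data/users"),
        TextInputFormat.class, IdentityMapper.class);
    MultipleInputs.addInputPath(conf, new Path("/data/events"),
        TextInputFormat.class, TokenCountMapper.class);
  }
}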

    // The <code>split</code> is an instance of {@link TaggedInputSplit},
    // but TaggedInputSplit is not a public class, so this class has to be
    // placed in the org.apache.hadoop.mapred.lib package.

    TaggedInputSplit taggedSplit = (TaggedInputSplit) split;
    InputSplit inputSplit = taggedSplit.getInputSplit();
    URI currentFileURI = MultiInputsHelpersRepository.getInstance(conf).getURIBySplit(inputSplit, conf);
    String currentFile = currentFileURI.toString();

    LOGGER.debug("Using [" + currentFile + "] to locate current Dataset");
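Because TaggedInputSplit is package-private, an alternative to compiling helper code into org.apache.hadoop.mapred.lib is to unwrap the split reflectively. A sketch of that workaround follows; SplitUnwrapper is illustrative and not taken from the snippets above.

import java.lang.reflect.Method;
import org.apache.hadoop.mapred.InputSplit;

// Illustrative workaround: recover the wrapped split from a TaggedInputSplit
// without placing code in the org.apache.hadoop.mapred.lib package.
public final class SplitUnwrapper {
  private SplitUnwrapper() { }

  public static InputSplit unwrap(InputSplit split) throws Exception {
    Class<?> cls = split.getClass();
    if (!"org.apache.hadoop.mapred.lib.TaggedInputSplit".equals(cls.getName())) {
      return split;                         // already a plain split
    }
    Method getInputSplit = cls.getDeclaredMethod("getInputSplit");
    getInputSplit.setAccessible(true);      // the declaring class is package-private
    return (InputSplit) getInputSplit.invoke(split);
  }
}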
