Examples of JobConf


Examples of cgl.imr.base.impl.JobConf

    long beforeTime = System.currentTimeMillis();

    System.out.println("Number of Map tasks: " + numberOfMapTasks
        + " Number of Reduce tasks: " + numberOfReduceTasks);

    JobConf jobConf = new JobConf("Dijkstra Shortest Path"
        + uuidGen.generateTimeBasedUUID());

    jobConf.setMapperClass(AllPairsShortestPathMap.class);
    jobConf.setReducerClass(AllPairsShortestPathReduce.class);
    jobConf.setCombinerClass(AllPairsShortestPathCombiner.class);
    jobConf.setNumMapTasks(numberOfMapTasks);
    jobConf.setNumReduceTasks(numberOfReduceTasks);

    TwisterModel mrDriver = new TwisterDriver(jobConf);
    mrDriver.configureMaps();

    int loopCount = 0;
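
The beforeTime value captured at the top of this snippet is presumably used to report elapsed wall-clock time once the iterations finish; a minimal sketch in plain Java (the reporting format is an assumption, not part of the original source):

    // Assumed follow-up: report elapsed time after the iterative job completes.
    long afterTime = System.currentTimeMillis();
    System.out.println("Total time: " + (afterTime - beforeTime) / 1000.0
        + " seconds, iterations: " + loopCount);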

Examples of com.dp.nebula.wormhole.common.config.JobConf

public class ParseXMLUtilTest {

  @Test
  public void testLoadJobConf() {
    String fileName = "src/test/resources/wormhole_hivereader_to_hdfswriter_test.xml";
    JobConf jobConf = ParseXMLUtil.loadJobConf(fileName);
    assertNotNull(jobConf);
    assertEquals("hivereader_to_hdfswriter_job", jobConf.getId());
   
    JobPluginConf readerConf =  jobConf.getReaderConf();
    List<JobPluginConf> writerConf = jobConf.getWriterConfs();
   
    assertEquals("hivereader", readerConf.getPluginName());
    IParam readerPluginParam = readerConf.getPluginParam();
    assertNotNull(readerPluginParam);
    assertTrue(readerPluginParam instanceof IParam);
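
A natural continuation of this test is to check the writer side as well; a hedged sketch, where both the single-writer count and the "hdfswriter" plugin name are assumptions based on the test file name:

    // Assumed writer-side assertions mirroring the reader checks above.
    assertNotNull(writerConf);
    assertEquals(1, writerConf.size());                     // assumption: one writer
    assertEquals("hdfswriter", writerConf.get(0).getPluginName());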

Examples of org.apache.hadoop.mapred.JobConf

    System.setProperty("hadoop.log.dir", logDir);
    c.set("mapred.output.dir", tmpDir);
    mrCluster = new MiniMRCluster(servers,
      FileSystem.get(conf).getUri().toString(), 1);
    LOG.info("Mini mapreduce cluster started");
    JobConf mrClusterJobConf = mrCluster.createJobConf();
    c.set("mapred.job.tracker", mrClusterJobConf.get("mapred.job.tracker"));
    /* this for mrv2 support */
    conf.set("mapreduce.framework.name", "yarn");
    String rmAddress = mrClusterJobConf.get("yarn.resourcemanager.address");
    if (rmAddress != null) {
      conf.set("yarn.resourcemanager.address", rmAddress);
    }
    String schedulerAddress =
      mrClusterJobConf.get("yarn.resourcemanager.scheduler.address");
    if (schedulerAddress != null) {
      conf.set("yarn.resourcemanager.scheduler.address", schedulerAddress);
    }
    }
  }
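
The matching teardown typically shuts the mini cluster down again; a minimal sketch, assuming mrCluster and LOG are fields of the same test utility (the method name here is illustrative):

    public void shutdownMiniMapReduceCluster() {
      LOG.info("Stopping mini mapreduce cluster...");
      if (mrCluster != null) {
        mrCluster.shutdown();
        mrCluster = null;
      }
      LOG.info("Mini mapreduce cluster stopped");
    }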

Examples of org.apache.hadoop.mapred.JobConf

    }

    testMerge_ = (-1 != userJobConfProps_.toString().indexOf("stream.testmerge"));

    // general MapRed job properties
    jobConf_ = new JobConf(config_);
   
    // All streaming jobs have, by default, no time-out for tasks
    jobConf_.setLong("mapred.task.timeout", 0);

    setUserJobConfProps(true);
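
Because mapred.task.timeout is set to 0 (no time-out) by default here, a caller can restore a finite limit afterwards; a minimal sketch, where the ten-minute value is purely illustrative:

    // Assumed override: re-enable a 10-minute task time-out (milliseconds).
    jobConf_.setLong("mapred.task.timeout", 600000L);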

Examples of org.apache.hadoop.mapred.JobConf

  /** Uses default mapper with no reduces for a map-only identity job. */
  @Test
  @SuppressWarnings("deprecation")
  public void testMapOnly() throws Exception {
    JobConf job = new JobConf();
    String inDir = System.getProperty("share.dir",".")+"/test/data";
    Path input = new Path(inDir+"/weather.avro");
    Path output = new Path(System.getProperty("test.dir",".")+"/weather-ident");
   
    output.getFileSystem(job).delete(output);
   
    job.setJobName("identity map weather");
   
    AvroJob.setInputSchema(job, Weather.SCHEMA$);
    AvroJob.setMapOutputSchema(job, Weather.SCHEMA$);

    FileInputFormat.setInputPaths(job, input);
    FileOutputFormat.setOutputPath(job, output);
    FileOutputFormat.setCompressOutput(job, true);
   
    job.setNumReduceTasks(0);                     // map-only
   
    JobClient.runJob(job);

    // check output is correct
    DatumReader<Weather> reader = new SpecificDatumReader<Weather>();
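
The output check cut off above would typically open the job's Avro output with the DatumReader just created; a hedged sketch, assuming the single map output file is named part-00000.avro and that java.io.File and org.apache.avro.file.DataFileReader are imported:

    // Assumed continuation: iterate over the identity-mapped records.
    DataFileReader<Weather> check = new DataFileReader<Weather>(
        new File(output.toString() + "/part-00000.avro"), reader);
    int records = 0;
    for (Weather w : check) {
      records++;                                 // each record should match the input
    }
    check.close();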

Examples of org.apache.hadoop.mapred.JobConf

  }   

  @Test
  @SuppressWarnings("deprecation")
  public void testSort() throws Exception {
    JobConf job = new JobConf();
    String inDir = System.getProperty("share.dir",".")+"/test/data";
    Path input = new Path(inDir+"/weather.avro");
    Path output = new Path(System.getProperty("test.dir",".")+"/weather-sort");
   
    output.getFileSystem(job).delete(output);
   
    job.setJobName("sort weather");
   
    AvroJob.setInputSchema(job, Weather.SCHEMA$);
    AvroJob.setMapOutputSchema
      (job, Pair.getPairSchema(Weather.SCHEMA$, Schema.create(Type.NULL)));
    AvroJob.setOutputSchema(job, Weather.SCHEMA$);
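
The remainder of this test, truncated above, would wire the paths into the job and run it much like the map-only example; a minimal sketch under that assumption:

    // Assumed completion of the sort job: same path wiring as the map-only test.
    FileInputFormat.setInputPaths(job, input);
    FileOutputFormat.setOutputPath(job, output);
    FileOutputFormat.setCompressOutput(job, true);
    JobClient.runJob(job);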

Examples of org.apache.hadoop.mapred.JobConf

    reader.close();
  }

  @Test
  public void testSequenceFileInputFormat() throws Exception {
    JobConf job = new JobConf();
    Path output = new Path(System.getProperty("test.dir",".")+"/seq-out");

    output.getFileSystem(job).delete(output);
   
    Schema schema = Pair.getPairSchema(Schema.create(Schema.Type.LONG),

Examples of org.apache.hadoop.mapred.JobConf

  public void setConf(Configuration conf) {
    if (conf instanceof JobConf) {
      this.conf = (JobConf) conf;
    } else {
      this.conf = new JobConf(conf);
    }
  }
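
This is the usual org.apache.hadoop.conf.Configurable pattern: setConf() reuses an incoming JobConf or wraps a plain Configuration. The companion accessor is normally just a getter over the same field, as in this minimal sketch:

    public Configuration getConf() {
      return conf;
    }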

Examples of org.apache.hadoop.mapred.JobConf

      EnumSet<Options> flags) throws IOException {
    LOG.info("srcPaths=" + srcPaths);
    LOG.info("destPath=" + destPath);
    checkSrcPath(conf, srcPaths);

    JobConf job = createJobConf(conf);
    //Initialize the mapper
    try {
      setup(conf, job, srcPaths, destPath, logPath, flags);
      JobClient.runJob(job);
    } finally {
      //delete tmp
      fullyDelete(job.get(TMP_DIR_LABEL), job);
      //delete jobDirectory
      fullyDelete(job.get(JOB_DIR_LABEL), job);
    }
  }

Examples of org.apache.hadoop.mapred.JobConf

    }
    return 0;
  }

  public static void main(String[] args) throws Exception {
    JobConf job = new JobConf(CopyFiles.class);
    CopyFiles distcp = new CopyFiles(job);
    int res = ToolRunner.run(distcp, args);
    System.exit(res);
  }
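
The same tool can be driven programmatically instead of through main(); a hedged sketch, where srcUri and destUri are hypothetical placeholder arguments:

    // Assumed programmatic invocation via ToolRunner; the URIs are placeholders.
    JobConf job = new JobConf(CopyFiles.class);
    String[] args = new String[] { srcUri, destUri };
    int res = ToolRunner.run(job, new CopyFiles(job), args);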