Examples of createJobConf()


Examples of com.ebay.erl.mobius.core.builder.Dataset.createJobConf()

    {
      // (enclosing loop over assignedDatasetID is not shown in this excerpt)
      Dataset aDataset = this.datasets[assignedDatasetID];
      if( aColumn.getDataset().equals(aDataset) )
      {
        // this dataset owns the column: validate it, build the dataset's own
        // job configuration, and merge it into the accumulated job conf
        JobSetup.validateColumns(aDataset, aColumn);
        Configuration aJobConf = aDataset.createJobConf(assignedDatasetID);
        this.jobConf = Util.merge(this.jobConf, aJobConf);
        // property name under which this dataset's join-key columns are stored
        joinKeyPropertyName = assignedDatasetID + ".key.columns";
        break;
      }
    }
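
In the loop above, createJobConf(assignedDatasetID) builds the per-dataset Configuration, which Util.merge then folds into the job-wide conf. Below is a minimal sketch, not the Mobius source, of what such a merge amounts to with plain Hadoop Configuration objects; the property name is hypothetical.

  import java.util.Map;

  import org.apache.hadoop.conf.Configuration;

  public class ConfMergeSketch {

    // Copy every entry of 'extra' into 'base' and return 'base'.
    static Configuration merge(Configuration base, Configuration extra) {
      for (Map.Entry<String, String> entry : extra) {
        base.set(entry.getKey(), entry.getValue());
      }
      return base;
    }

    public static void main(String[] args) {
      Configuration jobConf = new Configuration(false);     // job-wide conf
      Configuration datasetConf = new Configuration(false); // per-dataset conf
      datasetConf.set("0.key.columns", "id");               // hypothetical join-key property
      merge(jobConf, datasetConf);
      System.out.println(jobConf.get("0.key.columns"));     // prints "id"
    }
  }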

Examples of org.apache.hadoop.corona.MiniCoronaCluster.createJobConf()

      final Path outDir = new Path("./output");
      String input = "The quick brown fox\nhas many silly\nred fox sox\n";
      {
        LOG.info("launch job with fail tasks");
        // launch job with fail tasks
        JobConf jobConf = mr.createJobConf();
        jobConf.setOutputCommitter(CommitterWithLogs.class);
        JobClient jc = prepareJob(jobConf, inDir, outDir, input);
        RunningJob rJob = jc.submitJob(jobConf);
        jc.monitorAndPrintJob(jobConf, rJob);
        jt = (CoronaJobTracker) jc.jobSubmitClient;
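
The block above submits asynchronously and then waits: submitJob() returns a RunningJob handle right away, and monitorAndPrintJob() blocks while printing progress. A hedged, self-contained sketch of that submit-then-monitor pattern with the stock mapred JobClient API (prepareJob, CommitterWithLogs, and the Corona-specific cast belong to that test and are not reproduced here):

  import org.apache.hadoop.mapred.JobClient;
  import org.apache.hadoop.mapred.JobConf;
  import org.apache.hadoop.mapred.RunningJob;

  public class SubmitAndMonitorSketch {

    // Submit a fully configured job, then block until it finishes.
    static boolean submitAndWait(JobConf jobConf) throws Exception {
      JobClient jc = new JobClient(jobConf);
      RunningJob running = jc.submitJob(jobConf);      // returns immediately
      return jc.monitorAndPrintJob(jobConf, running);  // true if the job succeeded
    }
  }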

Examples of org.apache.hadoop.mapred.MiniMRCluster.createJobConf()

  public void testWithLocal() throws Exception {
    MiniMRCluster mr = null;
    try {
      mr = new MiniMRCluster(2, "file:///", 3);
      Configuration conf = mr.createJobConf();
      runWordCount(conf);
      runMultiFileWordCount(conf);
    } finally {
      if (mr != null) { mr.shutdown(); }
    }
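
runWordCount and runMultiFileWordCount are helpers defined elsewhere in that test. As a hedged, self-contained sketch of the same pattern, the following runs a trivial identity job against the mini cluster; the paths and job settings are hypothetical, not taken from the test:

  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.io.LongWritable;
  import org.apache.hadoop.io.Text;
  import org.apache.hadoop.mapred.FileInputFormat;
  import org.apache.hadoop.mapred.FileOutputFormat;
  import org.apache.hadoop.mapred.JobClient;
  import org.apache.hadoop.mapred.JobConf;
  import org.apache.hadoop.mapred.MiniMRCluster;
  import org.apache.hadoop.mapred.lib.IdentityMapper;
  import org.apache.hadoop.mapred.lib.IdentityReducer;

  public class MiniMRClusterSketch {

    public static void main(String[] args) throws Exception {
      MiniMRCluster mr = null;
      try {
        // 2 task trackers, backed by the local file system, 3 dirs per tracker
        mr = new MiniMRCluster(2, "file:///", 3);

        // createJobConf() hands back a JobConf already pointed at the mini
        // cluster's JobTracker, so the job below runs inside this test cluster.
        JobConf conf = mr.createJobConf();
        conf.setJobName("identity-pass-through");
        conf.setMapperClass(IdentityMapper.class);
        conf.setReducerClass(IdentityReducer.class);
        conf.setOutputKeyClass(LongWritable.class);
        conf.setOutputValueClass(Text.class);
        FileInputFormat.setInputPaths(conf, new Path("/tmp/minimr-in"));   // hypothetical
        FileOutputFormat.setOutputPath(conf, new Path("/tmp/minimr-out")); // hypothetical

        JobClient.runJob(conf);  // blocks until the job completes
      } finally {
        if (mr != null) {
          mr.shutdown();  // always tear the mini cluster down
        }
      }
    }
  }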

Examples of org.apache.hadoop.mapred.MiniMRCluster.createJobConf()

      MyFile[] files = createFiles(fs.getUri(), "/srcdat");
      long totsize = 0;
      for (MyFile f : files) {
        totsize += f.getSize();
      }
      Configuration job = mr.createJobConf();
      // cap the bytes copied per map so the job is split across roughly three maps
      job.setLong("distcp.bytes.per.map", totsize / 3);
      ToolRunner.run(new DistCp(job),
          new String[] {"-m", "100",
                        "-log",
                        namenode+"/logs",
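
The argument list above is truncated; DistCp still needs the source and destination URIs after the options. A hedged sketch of a complete programmatic invocation of the legacy DistCp tool, with hypothetical URIs; distcp.bytes.per.map limits how much data each map copies, and -m is only an upper bound on the number of maps:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.tools.DistCp;
  import org.apache.hadoop.util.ToolRunner;

  public class DistCpSketch {

    public static void main(String[] args) throws Exception {
      Configuration job = new Configuration();
      job.setLong("distcp.bytes.per.map", 64L * 1024 * 1024);  // ~64 MB copied per map
      int rc = ToolRunner.run(new DistCp(job), new String[] {
          "-m", "100",                          // upper bound on map tasks
          "-log", "hdfs://namenode:8020/logs",  // where DistCp writes its per-file log
          "hdfs://namenode:8020/srcdat",        // source directory (hypothetical)
          "hdfs://namenode:8020/destdat"        // destination directory (hypothetical)
      });
      System.exit(rc);
    }
  }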

Examples of org.apache.hadoop.mapred.MiniMRCluster.createJobConf()

      System.out.println("wrote "
          + fileSys.getFileStatus(DEMUX_INPUT_PATH).getLen()
          + " bytes of temp test data");
      long ts_start = System.currentTimeMillis();
      runDemux(mr.createJobConf(), DEMUX_INPUT_PATH, DEMUX_OUTPUT_PATH);

      long time = (System.currentTimeMillis() - ts_start);
      long bytes = fileSys.getContentSummary(DEMUX_OUTPUT_PATH).getLength();
      System.out.println("result was " + bytes + " bytes long");
      System.out.println("processing took " + time + " milliseconds");

Examples of org.apache.hadoop.mapred.MiniMRCluster.createJobConf()

  public void testWithLocal() throws Exception {
    MiniMRCluster mr = null;
    try {
      mr = new MiniMRCluster(2, "file:///", 3);
      Configuration conf = mr.createJobConf();
      runWordCount(conf);
    } finally {
      if (mr != null) { mr.shutdown(); }
    }
  }


Examples of org.apache.hadoop.mapred.MiniMRCluster.createJobConf()

      }
      System.out.println("args=" + Arrays.asList(args).toString().replace(",", ",\n  "));
      System.out.println("newstatus=" + Arrays.asList(newstatus).toString().replace(",", ",\n  "));

      //run DistCh
      new DistCh(mr.createJobConf()).run(args);
      runLsr(shell, tree.root, 0);

      //check results
      for(int i = 0; i < NUN_SUBS; i++) {
        Path sub = new Path(tree.root + "/sub" + i);

Examples of org.apache.hadoop.mapred.MiniMRCluster.createJobConf()

    JobHistoryParser parser = null;
    RewindableInputStream ris = null;
    ArrayList<String> seenEvents = new ArrayList<String>(15);
   
    try {
      JobConf jConf = mrCluster.createJobConf();
      // construct a job with 1 map and 1 reduce task.
      Job job = MapReduceTestUtil.createJob(jConf, inDir, outDir, 1, 1);
      // disable setup/cleanup
      job.setJobSetupCleanupNeeded(false);
      // set the output format to take care of the _temporary folder
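
The test then feeds the finished job's history file to the JobHistoryParser declared above. A hedged sketch of that parsing step on its own; the history file path is hypothetical, and in the test it is obtained from the mini cluster after the job completes:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
  import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;

  public class HistoryParseSketch {

    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      FileSystem fs = FileSystem.getLocal(conf);
      Path historyFile = new Path("/tmp/job_history_file");  // hypothetical location
      JobHistoryParser parser = new JobHistoryParser(fs, historyFile);
      JobInfo info = parser.parse();  // replays every recorded history event into JobInfo
      System.out.println("maps=" + info.getTotalMaps()
          + " reduces=" + info.getTotalReduces());
    }
  }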
