Package: com.datasalt.pangool.tuplemr

Usage examples of com.datasalt.pangool.tuplemr.TupleMRBuilder.cleanUpInstanceFiles()


    // Configure job output: tuples as keys, NullWritable as values.
    builder.setOutput(outPath, outputFormat, ITuple.class, NullWritable.class);
    try {
      Job job = builder.createJob();
      assertRun(job);
    } finally {
      // Always remove the builder's temporary instance files, even if the run fails.
      builder.cleanUpInstanceFiles();
    }

    // Verify the single reducer output file matches the two expected lines.
    Assert.assertEquals(line1out + "\n" + line2out,
        Files.toString(new File(OUT + "/" + "part-r-00000"), Charset.forName("UTF-8")).trim());
View Full Code Here


    // Configure job output: tuples as keys, NullWritable as values.
    builder.setOutput(outPath, outputFormat, ITuple.class, NullWritable.class);
    Job job = builder.createJob();
    try {
      assertRun(job);
    } finally {
      // Always remove the builder's temporary instance files, even if the run fails.
      builder.cleanUpInstanceFiles();
    }
    // Verify the reducer output: a header line followed by three data lines.
    Assert.assertEquals(outHeader + "\n" + line1 + "\n" + line2 + "\n" + line3,
        Files.toString(new File(OUT + "/" + "part-r-00000"), Charset.forName("UTF-8")).trim());

    // Clean up the test input path from the filesystem.
    HadoopUtils.deleteIfExists(fS, inPath);
View Full Code Here

    // Configure job output: tuples as keys, NullWritable as values.
    builder.setOutput(outPath, outputFormat, ITuple.class, NullWritable.class);
    Job job = builder.createJob();
    try {
      assertRun(job);
    } finally {
      // Always remove the builder's temporary instance files, even if the run fails.
      builder.cleanUpInstanceFiles();
    }
    // This is what we expect as output after field selection
    line1 = "10.0 100 true";
    line2 = "20.0 200 false";
    line3 = "30.0 300 true";
View Full Code Here

    // Write output as a Tuple file using the given schema.
    builder.setTupleOutput(outPath, schema);
    Job job = builder.createJob();
    try {
      assertRun(job);
    } finally {
      // Always remove the builder's temporary instance files, even if the run fails.
      builder.cleanUpInstanceFiles();
    }

    // Clean up the test output path from the filesystem.
    HadoopUtils.deleteIfExists(fS, outPath);
  }
View Full Code Here

    // Configure job output: tuples as keys, NullWritable as values.
    builder.setOutput(outPath, outputFormat, ITuple.class, NullWritable.class);
    Job job = builder.createJob();
    try {
      assertRun(job);
    } finally {
      // Always remove the builder's temporary instance files, even if the run fails.
      builder.cleanUpInstanceFiles();
    }
    // Verify the reducer output file matches the three expected lines.
    Assert.assertEquals(line1out + "\n" + line2out + "\n" + line3out,
        Files.toString(new File(OUT + "/" + "part-r-00000"), Charset.forName("UTF-8")).trim());

    // Clean up the test input path from the filesystem.
    HadoopUtils.deleteIfExists(fS, inPath);
View Full Code Here

    // Configure job output: tuples as keys, NullWritable as values.
    builder.setOutput(outPath, outputFormat, ITuple.class, NullWritable.class);
    Job job = builder.createJob();
    try {
      assertRun(job);
    } finally {
      // Always remove the builder's temporary instance files, even if the run fails.
      builder.cleanUpInstanceFiles();
    }
    // Verify the reducer output file matches the three expected lines.
    Assert.assertEquals(line1out + "\n" + line2out + "\n" + line3out,
        Files.toString(new File(OUT + "/" + "part-r-00000"), Charset.forName("UTF-8")).trim());

    // Clean up the test input path from the filesystem.
    HadoopUtils.deleteIfExists(fS, inPath);
View Full Code Here

        // (continuation of an output-configuration call cut off above)
        OutputFormat.class);
    Job job = builder.createJob();
    try {
      assertRun(job);
    } finally {
      // Always remove the builder's temporary instance files, even if the run fails.
      builder.cleanUpInstanceFiles();
    }

    // Check outputs

    // Assert that the first named output was compressed with DefaultCodec.
    checkCompression(firstReducerOutput(OUTPUT + "/" + OUTPUT_1), DefaultCodec.class);
View Full Code Here

    Job job = builder.createJob();
    try {
      // Run synchronously; 'true' enables progress/verbose output.
      job.waitForCompletion(true);
    } finally {
      // Always remove the builder's temporary instance files, even if the run fails.
      builder.cleanUpInstanceFiles();
    }

    // Use output as input of new TupleMRBuilder
    // To make things nicer, we evolve the Schema and use a different Schema for reading the Tuple File.
    // We remove the "content" and add a new nullable field.
View Full Code Here

    // Reuse the existing 'job' variable for the second job in this test.
    job = builder.createJob();
    try {
      assertRun(job);
    } finally {
      // Always remove the builder's temporary instance files, even if the run fails.
      builder.cleanUpInstanceFiles();
    }

    // Verify the text output file contains the two expected records.
    Assert.assertEquals("bar2 foo2\nfoo1 bar1",
        Files.toString(new File(OUT_TEXT + "/" + "part-r-00000"), Charset.forName("UTF-8")).trim());
View Full Code Here

    // Pass tuples through unchanged in the reduce phase.
    builder.setTupleReducer(new IdentityTupleReducer());
    Job job = builder.createJob();
    try {
      // Run synchronously; 'true' enables progress/verbose output.
      job.waitForCompletion(true);
    } finally {
      // Always remove the builder's temporary instance files, even if the run fails.
      builder.cleanUpInstanceFiles();
    }

    // Open the single reducer output as a Tuple file and verify it exists.
    Path toRead = new Path(out, "part-r-00000");
    assertTrue(fS.exists(toRead));
    TupleFile.Reader reader = new TupleFile.Reader(fS, conf, toRead);
View Full Code Here

TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.