Examples of ExecJob


Examples of org.apache.pig.backend.executionengine.ExecJob

    //
    // Use pig STORE to store testing data
    //
    System.out.println("path = " + path);
    try {
        ExecJob pigJob = pigServer
            .store(
                "records",
                path.toString(),
                TableStorer.class.getCanonicalName()
                    + "('[s1, s2]; [s3, s4]')");
View Full Code Here

Examples of org.apache.pig.backend.executionengine.ExecJob

    while (it.hasNext()) {
      Tuple cur = it.next();
      System.out.println(cur);
    }

    ExecJob pigJob = pigServer.store("records", new Path(pathTable, "store")
        .toString(), TableStorer.class.getCanonicalName() + "('[c]')");

    Assert.assertNull(pigJob.getException());
  }
View Full Code Here

Examples of org.apache.pig.backend.executionengine.ExecJob

      String dbStore = "org.apache.pig.piggybank.storage.DBStorage('" + driver                                                                                                                      
        + "', '" + dbUrl + "','" + user+ "', '"+ password + "', '" + insertQuery + "');";
    pigServer.registerQuery("A = LOAD '" + INPUT_FILE
        + "' as (id:int, fruit:chararray, ratio:double);");
    pigServer.registerQuery("STORE A INTO 'dummy' USING " + dbStore);
    ExecJob job = pigServer.executeBatch().get(0);
    try {
      while(!job.hasCompleted()) Thread.sleep(1000);
    } catch(InterruptedException ie) {// ignore
    }
   
    assertNotSame("Failed: " + job.getException(), job.getStatus(),
            ExecJob.JOB_STATUS.FAILED);
   
    Connection con = null;
    String selectQuery = "select id, name, ratio from ttt order by name";
    try {
View Full Code Here

Examples of org.apache.pig.backend.executionengine.ExecJob

    removeDir(getTableFullPath(newPath.toString()+"1"));

    /*
     * Table1 creation
     */
    ExecJob pigJob = pigServer
        .store(
            "records6",
            newPath.toString()+"1",
            TableStorer.class.getCanonicalName()
                + "('[records3::SF_a]; [records4::SF_a]')");
    Assert.assertNull(pigJob.getException());
   
    removeDir(getTableFullPath(newPath.toString()+"1"));
  }
View Full Code Here

Examples of org.apache.pig.backend.executionengine.ExecJob

    // Store using multiple outputs;
    String outputPaths = "us,india,japan";
    removeDir(getTableFullPath("us"));
    removeDir(getTableFullPath("india"));
    removeDir(getTableFullPath("japan"));
    ExecJob pigJob = pigServer
      .store(
        "records",
        outputPaths,
        TableStorer.class.getCanonicalName() +
             "('[word,count]', 'org.apache.hadoop.zebra.pig.TestMultipleOutputs1$OutputPartitionerClass')");   
   
    Assert.assertNull(pigJob.getException());
   
    // Validate results;
    query = "records = LOAD '" + "us"
          + "' USING org.apache.hadoop.zebra.pig.TableLoader();";
View Full Code Here

Examples of org.apache.pig.backend.executionengine.ExecJob

    // Store using multiple outputs;
    String outputPaths = "us,india,japan";
    removeDir(getTableFullPath("us"));
    removeDir(getTableFullPath("india"));
    removeDir(getTableFullPath("japan"));
    ExecJob pigJob = pigServer
      .store(
        "records",
        outputPaths,
        TableStorer.class.getCanonicalName() +
             "('[word,count]', 'org.apache.hadoop.zebra.pig.TestMultipleOutputs1$OutputPartitionerClass')");   
   
    Assert.assertNull(pigJob.getException());
   
    // Validate results;
    query = "records = LOAD '" + "us"
          + "' USING org.apache.hadoop.zebra.pig.TableLoader();";
View Full Code Here

Examples of org.apache.pig.backend.executionengine.ExecJob

    t1++;
   
    String table1path = pathTable1.toString() + Integer.toString(t1);
    removeDir(new Path(table1path));

ExecJob pigJob =pigServer.store("sort1", table1path, TableStorer.class.getCanonicalName()
        + "('[a, b, c]; [d, e, f, r1, m1]')");
if (pigJob.getException() != null){
  System.out.println("******pig job exception"+ pigJob.getException().getMessage());
}
Assert.assertNull(pigJob.getException());
    String query3 = "records1 = LOAD '"
        + table1path
        + "' USING org.apache.hadoop.zebra.pig.TableLoader('a, b, c, d, e, f, r1, m1', 'sorted');";

    System.out.println("query3:" + query3);
    pigServer.registerQuery(query3);  
   
    String foreach = "records11 = foreach records1 generate a as a, b as b, c as c, d as d, e as e, f as f, r1 as r1, m1#'a' as ma1;";
    pigServer.registerQuery(foreach);
 
    /*
     * Table2 creation
     */
    this.t1++;
    String table2path = this.pathTable2.toString() + Integer.toString(this.t1);
    removeDir(new Path(table2path));
    pigJob = pigServer.store("sort2", table2path, TableStorer.class.getCanonicalName()
        + "('[a, b, c]; [d,e,f,r1,m1]')");
    if (pigJob.getException() != null){
      System.out.println("******pig job exception"+ pigJob.getException().getMessage());
    }
    Assert.assertNull(pigJob.getException());
    String query4 = "records2 = LOAD '" + table2path
        + "' USING org.apache.hadoop.zebra.pig.TableLoader();";
    pigServer.registerQuery(query4);

   
View Full Code Here

Examples of org.apache.pig.backend.executionengine.ExecJob

     */
    this.t1++;
   
    String table1path = this.pathTable1.toString() + Integer.toString(this.t1);
    removeDir(new Path(table1path));
    ExecJob pigJob = pigServer.store("sort1", table1path, TableStorer.class.getCanonicalName()
        + "('[a, b, c]; [d, e, f, r1, m1]')");

    Assert.assertNull(pigJob.getException());

    String query3 = "records1 = LOAD '"
        + table1path
        + "' USING org.apache.hadoop.zebra.pig.TableLoader('a, b, c, d, e, f, r1, m1', 'sorted');";

    System.out.println("query3:" + query3);
    pigServer.registerQuery(query3);  
   
    String foreach = "records11 = foreach records1 generate a as a, b as b, c as c, d as d, e as e, f as f, r1 as r1, m1 as m1;";
    pigServer.registerQuery(foreach);
//    System.out.println( "Left table: >>>>>.");
//    printTuples(pigServer.openIterator("records11"));
   /* Iterator<Tuple> it_ordered = pigServer.openIterator("records1");
    int row_ordered = 0;
    Tuple RowValue_ordered = null;
    while (it_ordered.hasNext()) {
      RowValue_ordered = it_ordered.next();
      row_ordered++;
      System.out.println("row : " + row_ordered + " field a value: "
          + RowValue_ordered.get(0));
    }
    System.out.println("total row for table 1 after ordered:" + row_ordered);*/

    /*
     * Table2 creation
     */
    this.t1++;
    String table2path = this.pathTable2.toString() + Integer.toString(this.t1);
    removeDir(new Path(table2path));
    pigJob = pigServer.store("sort2", table2path, TableStorer.class.getCanonicalName()
        + "('[a, b, c]; [d,e,f,r1,m1]')");
    if (pigJob.getException() != null){
      System.out.println("******pig job exception"+ pigJob.getException().getMessage());
    }
    Assert.assertNull(pigJob.getException());
    String query4 = "records2 = LOAD '" + table2path
        + "' USING org.apache.hadoop.zebra.pig.TableLoader();";
    pigServer.registerQuery(query4);

//    System.out.println( "Right table: >>>>>.");
View Full Code Here

Examples of org.apache.pig.backend.executionengine.ExecJob

    while (it.hasNext()) {
      Tuple cur = it.next();
      System.out.println(cur);
    }

    ExecJob pigJob = pigServer.store("records", new Path(pathTable, "store")
        .toString(), TableStorer.class.getCanonicalName() + "('[m#{a|b}]')");

    Assert.assertNull(pigJob.getException());

  }
View Full Code Here

Examples of org.apache.pig.backend.executionengine.ExecJob

            if (currDAG.isBatchOn()) {
                currDAG.execute();
            }
           
            ExecJob job = store(id, FileLocalizer.getTemporaryPath(pigContext)
                    .toString(), Utils.getTmpFileCompressorName(pigContext) + "()");
           
            // invocation of "execute" is synchronous!

            if (job.getStatus() == JOB_STATUS.COMPLETED) {
                return job.getResults();
            } else if (job.getStatus() == JOB_STATUS.FAILED
                       && job.getException() != null) {
                // throw the backend exception in the failed case
                Exception e = job.getException();
                int errCode = 1066;
                String msg = "Unable to open iterator for alias " + id +
                ". Backend error : " + e.getMessage();
                throw new FrontendException(msg, errCode, PigException.INPUT, e);
            } else {
                throw new IOException("Job terminated with anomalous status "
                    + job.getStatus().toString());
            }
        }
        catch(FrontendException e){
            throw e;
        }
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Corporation. Contact coftware#gmail.com.