Package org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators

Examples of org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POUserFunc


                    ce1val.set(0,inputs.size());
                    ce1val.set(1,count);
                    ce1.setValue(ce1val);
                    ce1.setResultType(DataType.TUPLE);*/

                    // Build the GFCross UDF operator: ce1/ce2 (constant expressions set up
                    // above, partly in the commented-out code) are its two inputs, and it
                    // inherits the CROSS operator's requested parallelism.
                    POUserFunc gfc = new POUserFunc(new OperatorKey(scope, nodeGen.getNextNodeId(scope)),cross.getRequestedParallelism(), Arrays.asList((PhysicalOperator)ce1,(PhysicalOperator)ce2), new FuncSpec(GFCross.class.getName()));
                    // Preserve the original alias/location for error reporting.
                    gfc.addOriginalLocation(cross.getAlias(), cross.getLocation());
                    // GFCross emits a bag of group keys.
                    gfc.setResultType(DataType.BAG);
                    // addAsLeaf wires gfc under the current leaves of fep1; the inputs
                    // list is then set explicitly instead of via plan connect() calls
                    // (see the commented-out alternative below).
                    fep1.addAsLeaf(gfc);
                    gfc.setInputs(Arrays.asList((PhysicalOperator)ce1,(PhysicalOperator)ce2));
                    /*fep1.add(gfc);
                    fep1.connect(ce1, gfc);
                    fep1.connect(ce2, gfc);*/

                    PhysicalPlan fep2 = new PhysicalPlan();
View Full Code Here


  public void userFuncArity(DataBag input ) throws ExecException {
    String funcSpec = ARITY.class.getName() + "()";
    PORead read = new PORead(new OperatorKey("", r.nextLong()), input);
    List<PhysicalOperator> inputs = new LinkedList<PhysicalOperator>();
    inputs.add(read);
    POUserFunc userFunc = new POUserFunc(new OperatorKey("", r.nextLong()),
        -1, inputs, new FuncSpec(funcSpec));
    Result res = new Result();
    Integer i = null;
    res = userFunc.getNextInteger();
    while (res.returnStatus != POStatus.STATUS_EOP) {
      // System.out.println(res.result);
      int result = (Integer) res.result;
      assertEquals(2, result);
      res = userFunc.getNextInteger();
    }
  }
View Full Code Here

    // Fragment begins mid-method: tup1, r, INIT/INTERMED/FINAL and the
    // expected* fields come from the enclosing scope (not visible here).
    // NOTE(review): tup2 is built but never used in the visible code - confirm
    // it is consumed later or remove it.
    Tuple tup2 = Util.loadNestTuple(TupleFactory.getInstance().newTuple(1),
        input);
    // System.out.println("Input = " + tup1);
    String funcSpec = AVG.class.getName() + "()";

    POUserFunc po = new POUserFunc(new OperatorKey("", r.nextLong()), -1,
        null, new FuncSpec(funcSpec));

                //************ Initial Calculations ******************
    TupleFactory tf = TupleFactory.getInstance();
    po.setAlgebraicFunction(INIT);
    po.attachInput(tup1);
    Tuple t = null;
    Result res = po.getNextTuple();
    // NOTE(review): both "initial" tuples are taken from the same Result, so
    // they are trivially equal - presumably intentional to fabricate two
    // identical Initial outputs for the Intermediate stage; confirm.
    Tuple outputInitial1 = (res.returnStatus == POStatus.STATUS_OK) ? (Tuple) res.result
        : null;
    Tuple outputInitial2 = (res.returnStatus == POStatus.STATUS_OK) ? (Tuple) res.result
        : null;
    System.out.println(outputInitial1 + " " + outputInitial2);
    assertEquals(outputInitial1, outputInitial2);
    // AVG's Initial output is a (sum, count) pair.
    Double sum = (Double) outputInitial1.get(0);
    Long count = (Long) outputInitial1.get(1);
    assertEquals(initialExpectedSum, sum);
    assertEquals(initialExpectedCount, count);

                //************ Intermediate Data and Calculations ******************
    // Intermediate consumes a tuple whose single field is a bag of Initial
    // outputs; build that input from the two Initial tuples above.
    DataBag bag = BagFactory.getInstance().newDefaultBag();
    bag.add(outputInitial1);
    bag.add(outputInitial2);
    Tuple outputInitial = tf.newTuple();
    outputInitial.append(bag);
    // Tuple outputIntermed = intermed.exec(outputInitial);
    po = new POUserFunc(new OperatorKey("", r.nextLong()), -1, null,
        new FuncSpec(funcSpec));
    po.setAlgebraicFunction(INTERMED);
    po.attachInput(outputInitial);
    res = po.getNextTuple();
    Tuple outputIntermed = (res.returnStatus == POStatus.STATUS_OK) ? (Tuple) res.result
        : null;

    sum = (Double) outputIntermed.get(0);
    count = (Long) outputIntermed.get(1);
    assertEquals(intermedExpectedSum, sum);
    assertEquals(intermedExpectedCount, count);
    System.out.println(outputIntermed);

                //************ Final Calculations ******************
    po = new POUserFunc(new OperatorKey("", r.nextLong()), -1, null,
        new FuncSpec(funcSpec));
    po.setAlgebraicFunction(FINAL);
    // NOTE(review): Final is fed outputInitial (the bag of Initial results)
    // rather than a bag built from outputIntermed - looks deliberate since
    // Final can combine Initial outputs directly, but confirm.
    po.attachInput(outputInitial);
    res = po.getNextTuple();
    Double output = (res.returnStatus == POStatus.STATUS_OK) ? (Double) res.result
        : null;
    // Double output = fin.exec(outputInitial);
    assertEquals((Double)expectedAvg, output);
    // System.out.println("output = " + output);
View Full Code Here

   
            // setup udf: wrap IntSum.Intermediate as a POUserFunc fed by
            // projVal1, and wire it into the value plan.
            List<PhysicalOperator> udfInps = new ArrayList<PhysicalOperator>();
            udfInps.add(projVal1);
            FuncSpec sumSpec = new FuncSpec(IntSum.Intermediate.class.getName());
            POUserFunc sumUdf = new POUserFunc(GenPhyOp.getOK(), -1, udfInps,
                    sumSpec);
            // Register the operator in the plan and connect its single input.
            valPlan.add(sumUdf);
            valPlan.connect(projVal1, sumUdf);
   
            valuePlans.add(valPlan);
View Full Code Here

            // Project-star feeding a UDF: prjStar4 projects all fields, and the
            // UDF named by `string` (from the enclosing scope) consumes it.
            prjStar4.setStar(true);
            ep4.add(prjStar4);
           
            // NOTE(review): raw List/ArrayList - should be
            // List<PhysicalOperator> to match the rest of the file.
            List ufInps = new ArrayList();
            ufInps.add(prjStar4);
            POUserFunc uf = new POUserFunc(new OperatorKey("", r.nextLong()), -1, ufInps, new FuncSpec(string));
            ep4.add(uf);
            ep4.connect(prjStar4, uf);
            // Collect this inner plan; its output is not flattened.
            ep4s.add(ep4);
            flattened3.add(false);
        }
View Full Code Here

                       
                    }
                    // Also set the Distinct's function to type Initial in map
                    // to type Intermediate in combine plan and to type Final in
                    // the reduce
                    // (funcTypes[j] carries the algebraic stage matching plans[j].)
                    POUserFunc distinctFunc = (POUserFunc)getDistinctUserFunc(plans[j], leaf);
                    try {
                        distinctFunc.setAlgebraicFunction(funcTypes[j]);
                    } catch (ExecException e) {
                        // Wrap as an internal-error PlanException, preserving the cause.
                        int errCode = 2074;
                        String msg = "Could not configure distinct's algebraic functions in map reduce plan.";
                        throw new PlanException(msg, errCode, PigException.BUG, e);
                    }
View Full Code Here

        // The plan's leaf must be a UDF; anything else is an internal error.
        if (!(leaf instanceof POUserFunc)) {
            int errCode = 2020;
            String msg = "Expected to find plan with UDF leaf. Found " + leaf.getClass().getSimpleName();
            throw new PlanException(msg, errCode, PigException.BUG);
        }
        POUserFunc func = (POUserFunc)leaf;
        try {
            // `type` (from the enclosing scope) selects the algebraic stage
            // (Initial/Intermediate/Final) the UDF should run as.
            func.setAlgebraicFunction(type);
        } catch (ExecException e) {
            // Wrap as an internal-error PlanException, preserving the cause.
            int errCode = 2075;
            String msg = "Could not set algebraic function type.";
            throw new PlanException(msg, errCode, PigException.BUG, e);
        }
View Full Code Here

                    // explicitly set distinctPredecessor's result type to
                    // be tuple - this is relevant when distinctPredecessor is
                    // originally a POForeach with return type BAG - we need to
                    // set it to tuple so we get a stream of tuples.
                    distinctPredecessor.setResultType(DataType.TUPLE);
                    // Replace the projection with a bag-producing UDF (fSpec,
                    // funcInput from the enclosing scope) and drop the old
                    // PODistinct predecessor from the plan.
                    POUserFunc func = new POUserFunc(new OperatorKey(scope,
                            NodeIdGenerator.getGenerator().getNextNodeId(scope)),-1, funcInput, fSpec);
                    func.setResultType(DataType.BAG);
                    mPlan.replace(proj, func);
                    mPlan.remove(pred);
                    // connect the the newly added "func" to
                    // the predecessor to the earlier PODistinct
                    mPlan.connect(distinctPredecessor, func);
View Full Code Here

    // Translates a logical UserFuncExpression into a physical operator.
    // Fragment is truncated below; only the EvalFunc branch is visible.
    @Override
    public void visit( UserFuncExpression op ) throws FrontendException {      
        // Instantiate the UDF from its FuncSpec to decide which physical
        // operator to build.
        Object f = PigContext.instantiateFuncFromSpec(op.getFuncSpec());
        PhysicalOperator p;
        if (f instanceof EvalFunc) {
            // Reuse the already-instantiated EvalFunc instead of letting
            // POUserFunc re-instantiate it from the spec.
            p = new POUserFunc(new OperatorKey(DEFAULT_SCOPE, nodeGen
                    .getNextNodeId(DEFAULT_SCOPE)), -1,
                    null, op.getFuncSpec(), (EvalFunc) f);
            // Propagate the UDF signature so it can find its stored context.
            ((POUserFunc)p).setSignature(op.getSignature());
            List<String> cacheFiles = ((EvalFunc)f).getCacheFiles();
            if (cacheFiles != null) {
View Full Code Here

                    ce1val.set(0,inputs.size());
                    ce1val.set(1,count);
                    ce1.setValue(ce1val);
                    ce1.setResultType(DataType.TUPLE);*/

                    // Build the GFCross UDF operator: ce1/ce2 (constant expressions set up
                    // above, partly in the commented-out code) are its two inputs, and it
                    // inherits the CROSS operator's requested parallelism.
                    POUserFunc gfc = new POUserFunc(new OperatorKey(scope, nodeGen.getNextNodeId(scope)),cross.getRequestedParallelism(), Arrays.asList((PhysicalOperator)ce1,(PhysicalOperator)ce2), new FuncSpec(GFCross.class.getName()));
                    // Preserve the original alias/location for error reporting.
                    gfc.addOriginalLocation(cross.getAlias(), cross.getLocation());
                    // GFCross emits a bag of group keys.
                    gfc.setResultType(DataType.BAG);
                    // addAsLeaf wires gfc under the current leaves of fep1; the inputs
                    // list is then set explicitly instead of via plan connect() calls
                    // (see the commented-out alternative below).
                    fep1.addAsLeaf(gfc);
                    gfc.setInputs(Arrays.asList((PhysicalOperator)ce1,(PhysicalOperator)ce2));
                    /*fep1.add(gfc);
                    fep1.connect(ce1, gfc);
                    fep1.connect(ce2, gfc);*/

                    PhysicalPlan fep2 = new PhysicalPlan();
View Full Code Here

TOP

Related Classes of org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POUserFunc

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.