Package org.apache.pig.backend.hadoop.executionengine

Examples of org.apache.pig.backend.hadoop.executionengine.HExecutionEngine
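
The snippets below all follow the same pipeline: HExecutionEngine compiles a LogicalPlan into a PhysicalPlan, and MRCompiler turns that into an MROperPlan of MapReduce jobs. Here is a minimal sketch of that flow, assembled from the examples on this page; the import paths and the wrapper class are assumptions based on a Hadoop-era Pig release and may differ between versions.

import org.apache.pig.impl.PigContext;
import org.apache.pig.newplan.logical.relational.LogicalPlan;
import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRCompiler;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.MROperPlan;

// Sketch only: compile a logical plan down to a MapReduce operator plan.
// Package paths are assumed from a Hadoop-era Pig release and may vary by version.
public class HExecutionEngineSketch {
    public static MROperPlan toMRPlan(LogicalPlan lp, PigContext pigContext) throws Exception {
        HExecutionEngine engine = new HExecutionEngine(pigContext); // logical -> physical
        PhysicalPlan pp = engine.compile(lp, null);
        MRCompiler mrc = new MRCompiler(pp, pigContext);            // physical -> MapReduce
        mrc.compile();
        return mrc.getMRPlan();
    }
}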


        return p2lMap;
    }

    // Compile the script's logical plan to a physical plan with HExecutionEngine,
    // then to a MapReduce operator plan with MRCompiler.
    private MROperPlan getMROperPlan(LipstickPigServer lps) throws Exception {
        HExecutionEngine he = new HExecutionEngine(lps.getPigContext());
        PhysicalPlan pp = he.compile(getLogicalPlan(lps), null);

        MRCompiler mrc = new MRCompiler(pp, lps.getPigContext());
        mrc.compile();
        return mrc.getMRPlan();
    }
View Full Code Here


        switch (execType) {
            case LOCAL:
            case MAPREDUCE:
            {
                // Local and MapReduce modes share the Hadoop-backed execution engine.
                executionEngine = new HExecutionEngine(this);

                executionEngine.init();

                dfs = executionEngine.getDataStorage();
View Full Code Here

        // front end and back end and needs to be changed.
        // Right now it is not clear where the Job Id comes from to tell
        // the back end to kill a given job (mJobClient is used only in
        // processKill)
        //
        HExecutionEngine execEngine = mPigServer.getPigContext().getExecutionEngine();
        mJobConf = execEngine.getJobConf();
    }
View Full Code Here

            }
            break;

            case MAPREDUCE:
            {
                executionEngine = new HExecutionEngine(this);

                executionEngine.init();

                dfs = executionEngine.getDataStorage();
               
View Full Code Here

     * @throws ExecException if running the job fails.
     */
    public List<ExecJob> runPlan(LogicalPlan newPlan,
                                 String jobName) throws FrontendException, ExecException {

        // Compile the logical plan to a physical plan, launch it, and
        // return the resulting jobs.
        HExecutionEngine engine = new HExecutionEngine(pigContext);
        PhysicalPlan pp = engine.compile(newPlan, null);
        PigStats stats = launchPlan(pp, jobName);
        return getJobs(stats);
    }
View Full Code Here


        MROperPlan mrp = compile(php, pc);

        ConfigurationValidator.validatePigProperties(pc.getProperties());
        Configuration conf = ConfigurationUtil.toConfiguration(pc.getProperties());

        HExecutionEngine exe = pc.getExecutionEngine();
        // The engine's JobConf configures the JobClient used to submit and monitor the jobs.
        JobClient jobClient = new JobClient(exe.getJobConf());

        JobControlCompiler jcc = new JobControlCompiler(pc, conf);

        ScriptState.get().addWorkflowAdjacenciesToConf(mrp, conf);
View Full Code Here

        MROperPlan mrp = compile(php, pc);

        ConfigurationValidator.validatePigProperties(pc.getProperties());
        Configuration conf = ConfigurationUtil.toConfiguration(pc.getProperties());

        HExecutionEngine exe = pc.getExecutionEngine();
        JobClient jobClient = new JobClient(exe.getJobConf());

        JobControlCompiler jcc = new JobControlCompiler(pc, conf);

        // start collecting statistics
        PigStatsUtil.startCollection(pc, jobClient, jcc, mrp);
View Full Code Here
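
In the two launcher snippets above, the execution engine is the bridge to Hadoop configuration: its JobConf is used to build the JobClient that submits and monitors the generated jobs, while the Pig properties become the Configuration handed to JobControlCompiler. A minimal sketch of that wiring, assuming an already-connected PigContext (the package paths for ConfigurationUtil and JobControlCompiler are assumptions and may differ by Pig version):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.JobClient;
import org.apache.pig.impl.PigContext;
import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.JobControlCompiler;

// Sketch only: obtain the Hadoop handles used by the MapReduce launcher.
// Assumes pigContext.connect() has already created and init()'d the engine.
public class LauncherWiringSketch {
    public static void wire(PigContext pigContext) throws Exception {
        HExecutionEngine exe = pigContext.getExecutionEngine();
        // A JobClient built from the engine's JobConf submits and monitors the jobs.
        JobClient jobClient = new JobClient(exe.getJobConf());
        // The Pig properties become the Configuration given to JobControlCompiler.
        Configuration conf = ConfigurationUtil.toConfiguration(pigContext.getProperties());
        JobControlCompiler jcc = new JobControlCompiler(pigContext, conf);
    }
}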

        PhysicalPlan rpep = new PhysicalPlan();
        ConstantExpression rpce = new ConstantExpression(new OperatorKey(scope, nig.getNextNodeId(scope)));
        rpce.setRequestedParallelism(rp);
        int val = rp;
        if (val <= 0) {
            HExecutionEngine eng = pigContext.getExecutionEngine();
            if (pigContext.getExecType() != ExecType.LOCAL) {
                try {
                    // Fall back from the requested parallelism to the script-level
                    // default, then to the JobConf's reduce-task count, then to 1.
                    if (val <= 0)
                        val = pigContext.defaultParallel;
                    if (val <= 0)
                        val = eng.getJobConf().getNumReduceTasks();
                    if (val <= 0)
                        val = 1;
                } catch (Exception e) {
                    int errCode = 6015;
                    String msg = "Problem getting the default number of reduces from the Job Client.";
View Full Code Here
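
The last snippet resolves the number of reducers through a fallback chain: the operator's requested parallelism, then the script-level default, then the reduce-task count configured in the engine's JobConf, then 1. A hypothetical helper (not part of Pig) that captures the same order:

import org.apache.pig.ExecType;
import org.apache.pig.impl.PigContext;
import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine;

// Hypothetical helper, not in Pig: mirrors the fallback order of the snippet above.
// requested parallelism -> pigContext.defaultParallel -> JobConf reduce tasks -> 1
public class ParallelismSketch {
    public static int resolve(int requested, PigContext pigContext) throws Exception {
        int val = requested;
        if (val <= 0 && pigContext.getExecType() != ExecType.LOCAL) {
            HExecutionEngine eng = pigContext.getExecutionEngine();
            if (val <= 0) val = pigContext.defaultParallel;
            if (val <= 0) val = eng.getJobConf().getNumReduceTasks();
            if (val <= 0) val = 1;
        }
        return val;
    }
}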

