Package org.apache.pig.tools.pigstats

Examples of org.apache.pig.tools.pigstats.PigStats
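PigStats is the object Pig uses to report on a run: overall success or failure, per-job statistics exposed as JobStats through the job graph, and per-output statistics exposed as OutputStats. The snippets below obtain it in three ways: directly from a launcher's launchPig(), from PigRunner.run() when driving Pig the way the command line does, and from batch execution and store(...) calls of the kind PigServer exposes. As a minimal sketch of reading it back (the script path is hypothetical; only methods that also appear in the snippets below are used):

import java.util.Iterator;

import org.apache.pig.PigRunner;
import org.apache.pig.tools.pigstats.JobStats;
import org.apache.pig.tools.pigstats.OutputStats;
import org.apache.pig.tools.pigstats.PigStats;

public class PigStatsSketch {
    public static void main(String[] cmdArgs) throws Exception {
        // Hypothetical script path; run in local mode without a progress listener.
        String[] args = { "-x", "local", "/tmp/myscript.pig" };
        PigStats stats = PigRunner.run(args, null);

        if (!stats.isSuccessful()) {
            System.out.println("error msg: " + stats.getErrorMessage());
            return;
        }

        // Per-job statistics from the job DAG.
        Iterator<JobStats> jobs = stats.getJobGraph().iterator();
        while (jobs.hasNext()) {
            JobStats js = jobs.next();
            System.out.println(js.getJobId() + " successful: " + js.isSuccessful());
        }

        // Per-output statistics, one entry per store.
        for (OutputStats os : stats.getOutputStats()) {
            System.out.println(os.getLocation() + ": " + os.getNumberRecords() + " records");
        }

        System.out.println("Records written : " + stats.getRecordsWritten());
        System.out.println("Bytes written : " + stats.getBytesWritten());
    }
}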


            // Launch the plan with the local launcher, then wrap every output that was
            // written successfully as a completed LocalJob carrying the run's PigStats.
            LocalPigLauncher launcher = new LocalPigLauncher();

            List<ExecJob> jobs = new ArrayList<ExecJob>();
                   
            PigStats stats = launcher.launchPig(plan, jobName, pigContext);
            for (FileSpec fspec: launcher.getSucceededFiles()) {
                jobs.add(new LocalJob(ExecJob.JOB_STATUS.COMPLETED, pigContext, fspec, stats));
            }
           
            for (FileSpec fspec: launcher.getFailedFiles()) {
View Full Code Here


        stores = PlanHelper.getStores(php);

        int noJobs = stores.size();
        int failedJobs = 0;
       
        PigStats stats = new PigStats();
        stats.setPhysicalPlan(php);
        stats.setExecType(pc.getExecType());

        for (POStore op : stores) {
            op.setStoreImpl(new LocalPOStoreImpl(pc));
            op.setUp();
        }

        // We need to handle stores that have loads as successors
        // first. PlanHelper's getStores has returned those in the
        // dependency order, so that's how we will run them.
        for (Iterator<POStore> it = stores.iterator(); it.hasNext(); ) {
            POStore op = it.next();

            List<PhysicalOperator> sucs = new ArrayList<PhysicalOperator>();
            if (php.getSuccessors(op) != null) {
                sucs.addAll(php.getSuccessors(op));
            }
           
            if (sucs.size() != 0) {
                log.info("running store with dependencies");
                POStore[] st = new POStore[1];
                st[0] = op;
                failedJobs += runPipeline(st, pc);
                for (PhysicalOperator suc: sucs) {
                    php.disconnect(op, suc);
                }
                it.remove();
            }
        }
               
        // The remaining stores can be run together.
        failedJobs += runPipeline(stores.toArray(new POStore[0]), pc);
       
        stats.accumulateStats();

        // Give UDFs a chance to clean up by calling finish() on each one in the plan.
        UDFFinishVisitor finisher = new UDFFinishVisitor(php, new DependencyOrderWalker<PhysicalOperator, PhysicalPlan>(php));
        finisher.visit();

        for (FileSpec spec: failedStores) {
            log.info("Failed to produce result in: \""+spec.getFileName()+"\"");
        }

        for (FileSpec spec: succeededStores) {
            log.info("Successfully stored result in: \""+spec.getFileName()+"\"");
        }

        UDFContext.getUDFContext().reset();
        if (failedJobs == 0) {
            log.info("Records written : " + stats.getRecordsWritten());
            log.info("Bytes written : " + stats.getBytesWritten());
            log.info("100% complete!");
            log.info("Success!!");
            return stats;
        } else {
            log.info("Failed jobs!!");
View Full Code Here

        FileWriter fw1 = new FileWriter(f1);
        fw1.append(macro).append(script);
        fw1.close();
       
        String[] args = { "-x", "local", "-p", "output1=byuser", "-p", "output2=byage", "-c", "myscript.pig" };
        PigStats stats = PigRunner.run(args, null);
        assertTrue(stats.isSuccessful());
    }
View Full Code Here

   
    private void verify(String s, String expected) throws Exception {
        createFile("myscript.pig", s);
       
        String[] args = { "-Dpig.import.search.path=/tmp", "-x", "local", "-c", "myscript.pig" };
        PigStats stats = PigRunner.run(args, null);
       
        if (!stats.isSuccessful()) {
            System.out.println("error msg: " + stats.getErrorMessage());
        }
       
        assertTrue(stats.isSuccessful());
       
        String[] args2 = { "-Dpig.import.search.path=/tmp", "-x", "local", "-r", "myscript.pig" };
        PigRunner.run(args2, null);
       
        File f2 = new File("myscript.pig.expanded");
View Full Code Here

        String[] args = {
                (importSearchPath != null ? "-Dpig.import.search.path=" + importSearchPath : ""),
                "-x", "local", "-c", "myscript.pig"
        };
        PigStats stats = PigRunner.run(args, null);

        return stats.isSuccessful();
    }
View Full Code Here

        w2.close();
       
        try {
            String[] args = { "-Dpig.tmpfilecompression.codec=gz",
                    "-Dtfile.io.chunk.size=100", "tfile.pig" };
            PigStats stats = PigRunner.run(args, null);
    
            assertTrue(stats.isSuccessful());
            String[] args2 = { "-Dpig.tmpfilecompression.codec=gz",
                    "-Dtfile.io.chunk.size=100", "tfile2.pig" };
            PigStats stats2 = PigRunner.run(args2, null);

            assertTrue(stats2.isSuccessful());
           
            // Look up the output produced for alias B and read the stored tuples back.
            OutputStats os = stats2.result("B");
            Iterator<Tuple> iter = os.iterator();
            int count = 0;
            String expected = "(1,this is a test for compression of temp files)";
            while (iter.hasNext()) {
                count++;
View Full Code Here

    }

    private boolean executePlan(PhysicalPlan pp) throws IOException {
        boolean failed = true;
        MapReduceLauncher launcher = new MapReduceLauncher();
        PigStats stats = null;
        try {
            stats = launcher.launchPig(pp, "execute", myPig.getPigContext());
        } catch (Exception e) {
            e.printStackTrace(System.out);
            throw new IOException(e);
        }
        // Walk the job DAG and stop at the first job that did not succeed.
        Iterator<JobStats> iter = stats.getJobGraph().iterator();
        while (iter.hasNext()) {
            JobStats js = iter.next();
            failed = !js.isSuccessful();
            if (failed) {
                break;
View Full Code Here

    public List<ExecJob> executeBatch(boolean parseAndBuild) throws IOException {
        if (parseAndBuild) {
            parseAndBuild();
        }

        PigStats stats = null;
        if( !isMultiQuery ) {
            // If multi-query optimization is off, each store has already been executed
            // as it was registered, so just pick up the stats accumulated so far.
            stats = PigStats.get();
        } else {
            stats = execute();
View Full Code Here

     * @return {@link ExecJob} containing information about this job
     * @throws IOException
     */
    public ExecJob store(String id, String filename, String func)
            throws IOException {
        PigStats stats = storeEx(id, filename, func);
        if (stats.getOutputStats().size() < 1) {
            throw new IOException("Couldn't retrieve job.");
        }
        OutputStats output = stats.getOutputStats().get(0);

        if (stats.isSuccessful()) {
            return new HJob(JOB_STATUS.COMPLETED, pigContext, output
                    .getPOStore(), output.getAlias(), stats);
        } else {
            HJob job = new HJob(JOB_STATUS.FAILED, pigContext,
                    output.getPOStore(), output.getAlias(), stats);

            // check whether any job in the graph failed with an exception
            Exception ex = null;
            for(JobStats js : stats.getJobGraph()){
                if(js.getException() != null) {
                    ex = js.getException();
                }
            }
            job.setException(ex);
View Full Code Here
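
The store(...) method above matches PigServer's embedded API. As a rough, hypothetical sketch of calling it (the alias, input/output paths, and storer string are made-up values, and local mode is assumed):

import org.apache.pig.ExecType;
import org.apache.pig.PigServer;
import org.apache.pig.backend.executionengine.ExecJob;

public class StoreSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical alias, paths, and storer; adjust to a real dataset.
        PigServer pigServer = new PigServer(ExecType.LOCAL);
        pigServer.registerQuery("A = load '/tmp/input' as (name:chararray, age:int);");
        ExecJob job = pigServer.store("A", "/tmp/output", "PigStorage()");
        if (job.getStatus() != ExecJob.JOB_STATUS.COMPLETED) {
            System.out.println("store did not complete successfully");
        }
    }
}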

           return PigStats.get();
        }

        pigContext.getProperties().setProperty("pig.logical.plan.signature", currDAG.lp.getSignature());

        PigStats stats = executeCompiledLogicalPlan();

        return stats;
    }
View Full Code Here
