Package: org.apache.pig.impl

Examples of org.apache.pig.impl.PigContext


        lpt.buildPlan("E = group A by id;");
        lpt.buildPlan("B = LOAD 'data2' using "+ DummyIndexableLoader.class.getName() +"() as (id, name, grade);");
        LogicalPlan lp = lpt.buildPlan("C = join E by A.id, B by id using 'merge';");
        assertEquals(LOJoin.JOINTYPE.MERGE, ((LOJoin)lp.getLeaves().get(0)).getJoinType());

        PigContext pc = new PigContext(ExecType.MAPREDUCE,cluster.getProperties());
        pc.connect();
        boolean exceptionCaught = false;
        try{
            Util.buildPhysicalPlan(lp, pc);  
        }catch (LogicalToPhysicalTranslatorException e){
            assertEquals(1103,e.getErrorCode());
View Full Code Here


        Configuration conf = jobcontext.getConfiguration();

        ArrayList<FileSpec> inputs;
        ArrayList<ArrayList<OperatorKey>> inpTargets;
        PigContext pigContext;
        try {
            inputs = (ArrayList<FileSpec>) ObjectSerializer
                    .deserialize(conf.get("pig.inputs"));
            inpTargets = (ArrayList<ArrayList<OperatorKey>>) ObjectSerializer
                    .deserialize(conf.get("pig.inpTargets"));
            pigContext = (PigContext) ObjectSerializer.deserialize(conf
                    .get("pig.pigContext"));
            PigContext.setPackageImportList((ArrayList<String>)ObjectSerializer.deserialize(conf.get("udf.import.list")));
        } catch (Exception e) {
            int errCode = 2094;
            String msg = "Unable to deserialize object.";
            throw new ExecException(msg, errCode, PigException.BUG, e);
        }
       
        ArrayList<InputSplit> splits = new ArrayList<InputSplit>();
        for (int i = 0; i < inputs.size(); i++) {
            try {
                Path path = new Path(inputs.get(i).getFileName());
                               
                FileSystem fs;
               
                try {
                    fs = path.getFileSystem(conf);
                } catch (Exception e) {
                    // If an application specific
                    // scheme was used
                    // (e.g.: "hbase://table") we will fail
                    // getting the file system. That's
                    // ok, we just use the dfs in that case.
                    fs = new Path("/").getFileSystem(conf);
                }

                // if the execution is against Mapred DFS, set
                // working dir to /user/<userid>
                if(pigContext.getExecType() == ExecType.MAPREDUCE) {
                    fs.setWorkingDirectory(jobcontext.getWorkingDirectory());
                }
               
                // first pass input location to the loader - for this send a
                // clone of the configuration we have - this is so that if the
View Full Code Here

    }
   
    @Before
    @Override
    public void setUp() throws Exception {
        pigContext = new PigContext(ExecType.LOCAL, getProperties());
        input = File.createTempFile("PigContextTest-", ".txt");
    }
View Full Code Here

        String jarFile = tmpDir.getAbsolutePath() + FILE_SEPARATOR + jarName;
        status = Util.executeJavaCommand("jar -cf " + tmpDir.getAbsolutePath() + FILE_SEPARATOR + jarName +
                              " -C " + tmpDir.getAbsolutePath() + " " + "com");
        assertTrue(status==0);
        Properties properties = cluster.getProperties();
        PigContext pigContext = new PigContext(ExecType.MAPREDUCE, properties);
       
        //register jar using properties
        pigContext.getProperties().setProperty("pig.additional.jars", jarFile);
        PigServer pigServer = new PigServer(pigContext);

        PigContext.initializeImportList("com.xxx.udf1:com.xxx.udf2.");
        ArrayList<String> importList = PigContext.getPackageImportList();
        assertTrue(importList.size()==5);
View Full Code Here

        LogicalPlanTester tester = new LogicalPlanTester();
        tester.buildPlan( "a = load 'students.txt' as (c1,c2,c3,c4); ");
        tester.buildPlan("c = group a by c2; ");
        tester.buildPlan("f = foreach c generate COUNT(org.apache.pig.builtin.Distinct($1.$2)); ");
        LogicalPlan lp = tester.buildPlan("store f into 'out';");
        PigContext pc = new PigServer(ExecType.MAPREDUCE, cluster.getProperties()).getPigContext();
        assertTrue((Util.buildMRPlan(Util.buildPhysicalPlan(lp,pc),pc).getRoots().get(0).combinePlan.isEmpty()));
    }
View Full Code Here

        tester.buildPlan("c = group a by c2; ");
        String dummyUDF = JiraPig1030.class.getName();
        tester.buildPlan("f = foreach c generate COUNT("+dummyUDF+"" +
            "(org.apache.pig.builtin.Distinct($1.$2),"+dummyUDF+"())); ");
        LogicalPlan lp = tester.buildPlan("store f into 'out';");
        PigContext pc = new PigServer(ExecType.MAPREDUCE, cluster.getProperties()).getPigContext();
        assertTrue((Util.buildMRPlan(Util.buildPhysicalPlan(lp,pc),pc).getRoots().get(0).combinePlan.isEmpty()));
    }
View Full Code Here

        }
    }
   
    @Test
    public void testIsTempFile() throws Exception {
        PigContext context = new PigContext(ExecType.LOCAL, new Properties());
        context.connect();
        for (int i=0; i<100; i++) {
            String file = FileLocalizer.getTemporaryPath(context).toString();
            assertTrue("not a temp file: " + file, PigStatsUtil.isTempFile(file));
        }
    }
View Full Code Here

        java.lang.reflect.Method getPigContext = stats.getClass()
                .getDeclaredMethod("getPigContext");

        getPigContext.setAccessible(true);

        PigContext ctx = (PigContext) getPigContext.invoke(stats);

        Assert.assertNotNull(ctx);

        assertTrue(ctx.extraJars.contains(ClassLoader.getSystemResource("pig-withouthadoop.jar")));
        assertEquals("default", ctx.getProperties().getProperty("mapred.job.queue.name"));
      
    }
View Full Code Here

        Properties props = new Properties();
        for (Entry<Object, Object> entry : cluster.getProperties().entrySet()) {
            props.put(entry.getKey(), entry.getValue());
        }
        props.setProperty("mapred.max.split.size", Integer.toString(splitSize));
        PigContext pigContext = new PigContext(ExecType.MAPREDUCE, props);
        PigServer pig = new PigServer(pigContext);
        FileSystem fs = FileSystem.get(ConfigurationUtil.toConfiguration(props));
        fs.delete(new Path(outputFile), true);
        Util.registerMultiLineQuery(pig, scriptToTestSplitting);
       
View Full Code Here

        String inputFileName = "input2.txt";
        Util.createInputFile(cluster, inputFileName, inputData);
       
        PigServer pig = new PigServer(ExecType.MAPREDUCE, cluster
                .getProperties());
        PigContext pigContext = pig.getPigContext();
        pigContext.getProperties().setProperty( "output.compression.enabled", "true" );
        pigContext.getProperties().setProperty( "output.compression.codec", "org.apache.hadoop.io.compress.BZip2Codec" );
       
        pig.setBatchOn();
        pig.registerQuery("a = load '" +  inputFileName + "';");
        pig.registerQuery("store a into 'output2.bz2';");
        pig.registerQuery("store a into 'output2';");
View Full Code Here

TOP

Related Classes of org.apache.pig.impl.PigContext

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Inc. Contact coftware#gmail.com.