Package org.apache.pig.impl

Examples of org.apache.pig.impl.PigContext$ContextClassLoader
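
Before the excerpts, here is a minimal, self-contained sketch of the pattern they all share: build a PigContext, connect it, and obtain the context classloader this page covers via createCl. This is an illustrative sketch against a local Pig install, not code from any of the projects below.

    import java.util.Properties;

    import org.apache.pig.ExecType;
    import org.apache.pig.impl.PigContext;

    public class PigContextExample {
        public static void main(String[] args) throws Exception {
            // LOCAL execution needs no cluster; the Properties object can carry
            // engine settings (e.g. mapred.job.tracker in MAPREDUCE mode).
            PigContext ctx = new PigContext(ExecType.LOCAL, new Properties());

            // Most PigContext operations fail before connect() starts the engine.
            ctx.connect();

            // createCl(null) builds a classloader over the current classpath;
            // this is where PigContext$ContextClassLoader comes from.
            ClassLoader cl = ctx.createCl(null);
            System.out.println(cl.getClass().getName());
        }
    }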


    if (StringUtils.hasText(jobTracker)) {
      prop.setProperty("mapred.job.tracker", jobTracker);
      // invoking the setter below causes an NPE, since Pig expects the engine to already be started ...
      // context.setJobtrackerLocation(jobTracker);
    }
    context = new PigContext(execType, prop);

    if (StringUtils.hasText(lastAlias)) {
      context.setLastAlias(lastAlias);
    }
  }


  public boolean isSingleton() {
    return true;
  }

  protected PigServer createPigInstance() throws Exception {
    final PigContext ctx = (pigContext != null ? pigContext : new PigContext());

    // apparently, if the context is not connected, Pig can cause all kinds of errors
    PigServer pigServer = null;

    try {
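Per the comment above, the elided body has to connect the context before constructing the server. A hedged sketch of that shape (illustrative, not the project's actual code):

    // Sketch only: connect first, then hand the context to PigServer.
    PigContext ctx = (pigContext != null ? pigContext : new PigContext());
    ctx.connect();
    PigServer pigServer = new PigServer(ctx);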

        }

        private void setUpHashTable() throws IOException {
            // Load the replicated input with PigStorage and build the join hash table from it.
            FileSpec replFile = new FileSpec(repl, new FuncSpec(PigStorage.class.getName() + "()"));
            POLoad ld = new POLoad(new OperatorKey("Repl File Loader", 1L), replFile);
            PigContext pc = new PigContext(ExecType.MAPREDUCE,
                    ConfigurationUtil.toProperties(PigMapReduce.sJobConfInternal.get()));
            try {
                pc.connect();

                ld.setPc(pc);
                Tuple dummyTuple = null;
                for (Result res = ld.getNext(dummyTuple); res.returnStatus != POStatus.STATUS_EOP; res = ld.getNext(dummyTuple)) {
                    Tuple tup = (Tuple) res.result;
                    // Use the load function's caster to decode the raw bytes.
                    LoadFunc lf = (LoadFunc) pc.instantiateFuncFromSpec(ld.getLFile().getFuncSpec());
                    String key = lf.getLoadCaster().bytesToCharArray(((DataByteArray) tup.get(keyField)).get());
                    Tuple csttup = TupleFactory.getInstance().newTuple(2);
                    csttup.set(0, key);
                    csttup.set(1, lf.getLoadCaster().bytesToInteger(((DataByteArray) tup.get(1)).get()));
                    DataBag vals = null;

                    throw new AssertionError("Unhandled option " + cc.toString());
                }
            }
        }
        // create the context with the given parameters
        PigContext pigContext = new PigContext(execType, properties);

        // configure logging
        configureLog4J(properties, pigContext);
       
        if (logFileName == null && !userSpecifiedLog) {
            logFileName = validateLogFile(properties.getProperty("pig.logfile"), null);
        }

        if (logFileName != null) {
            log.info("Logging error messages to: " + logFileName);
        }

        pigContext.getProperties().setProperty("pig.logfile", (logFileName == null ? "" : logFileName));

        if (optimizerRules.size() > 0) {
            pigContext.getProperties().setProperty("pig.optimizer.rules",
                    ObjectSerializer.serialize(optimizerRules));
        }

        if (properties.get("udf.import.list") != null) {
            PigContext.initializeImportList((String) properties.get("udf.import.list"));
        }

        LogicalPlanBuilder.classloader = pigContext.createCl(null);

        // construct the parameter substitution preprocessor
        Grunt grunt = null;
        BufferedReader in;
        String substFile = null;
        switch (mode) {
        case FILE: {
            // Run, using the provided file as a pig file
            in = new BufferedReader(new FileReader(file));

            // run parameter substitution preprocessor first
            substFile = file + ".substituted";
            pin = runParamPreprocessor(in, params, paramFiles, substFile, debug || dryrun);
            if (dryrun) {
                log.info("Dry run completed. Substituted pig script is at " + substFile);
                return;
            }

            logFileName = validateLogFile(logFileName, file);
            pigContext.getProperties().setProperty("pig.logfile", logFileName);

            // Set the job name based on the name of the script
            pigContext.getProperties().setProperty(PigContext.JOB_NAME,
                    "PigLatin:" + new File(file).getName());

            if (!debug) {
                new File(substFile).deleteOnExit();
            }

            grunt = new Grunt(pin, pigContext);
            gruntCalled = true;
            int[] results = grunt.exec();
            rc = getReturnCodeForStats(results);
            return;
        }

        case STRING: {
            // Gather up all the remaining arguments into a string and pass them
            // into grunt.
            StringBuffer sb = new StringBuffer();
            String[] remainders = opts.getRemainingArgs();
            for (int i = 0; i < remainders.length; i++) {
                if (i != 0) sb.append(' ');
                sb.append(remainders[i]);
            }
            in = new BufferedReader(new StringReader(sb.toString()));
            grunt = new Grunt(in, pigContext);
            gruntCalled = true;
            int[] results = grunt.exec();
            rc = getReturnCodeForStats(results);
            return;
        }

        default:
            break;
        }

        // If we're here, we don't yet know what they want. They may have given
        // us a jar to execute, a pig script to execute, or a dash (or nothing),
        // which means to run grunt interactively.
        String[] remainders = opts.getRemainingArgs();
        if (remainders == null) {
            // Interactive
            mode = ExecMode.SHELL;
            ConsoleReader reader = new ConsoleReader(System.in, new OutputStreamWriter(System.out));
            reader.setDefaultPrompt("grunt> ");
            final String HISTORYFILE = ".pig_history";
            String historyFile = System.getProperty("user.home") + File.separator  + HISTORYFILE;
            reader.setHistory(new History(new File(historyFile)));
            ConsoleReaderInputStream inputStream = new ConsoleReaderInputStream(reader);
            grunt = new Grunt(new BufferedReader(new InputStreamReader(inputStream)), pigContext);
            grunt.setConsoleReader(reader);
            gruntCalled = true;
            grunt.run();
            rc = 0;
            return;
        } else {
            // They have a pig script they want us to run.
            if (remainders.length > 1) {
                throw new RuntimeException("You can only run one pig script "
                        + "at a time from the command line.");
            }
            mode = ExecMode.FILE;
            in = new BufferedReader(new FileReader(remainders[0]));

            // run parameter substitution preprocessor first
            substFile = remainders[0] + ".substituted";
            pin = runParamPreprocessor(in, params, paramFiles, substFile, debug || dryrun);
            if (dryrun){
                log.info("Dry run completed. Substituted pig script is at " + substFile);
                return;
            }
           
            logFileName = validateLogFile(logFileName, remainders[0]);
            pigContext.getProperties().setProperty("pig.logfile", logFileName);

            if (!debug) {
                new File(substFile).deleteOnExit();
            }

            // Set the job name based on the name of the script
            pigContext.getProperties().setProperty(PigContext.JOB_NAME,
                    "PigLatin:" + new File(remainders[0]).getName());

            grunt = new Grunt(pin, pigContext);
            gruntCalled = true;
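Earlier in this excerpt, the line LogicalPlanBuilder.classloader = pigContext.createCl(null) is where this page's subject class appears: createCl wraps the classpath (plus any registered jars) in a PigContext$ContextClassLoader. A short illustrative sketch of driving the same pieces directly; the package name is hypothetical:

    PigContext ctx = new PigContext(ExecType.LOCAL, new Properties());

    // Comma-separated package prefixes searched when resolving short UDF names.
    PigContext.initializeImportList("com.example.udfs.");  // hypothetical package

    // The ContextClassLoader used for script-level class resolution.
    LogicalPlanBuilder.classloader = ctx.createCl(null);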

        POLoad ld = new POLoad(new OperatorKey(this.mKey.scope,NodeIdGenerator.getGenerator().getNextNodeId(this.mKey.scope)),
                new FileSpec(indexFileName, idxFuncSpec));

        // The index file is distributed to all mappers through the Distributed Cache, so read it from the local file system.
        Properties props = ConfigurationUtil.getLocalFSProperties();
        ld.setPc(new PigContext(ExecType.LOCAL, props));

        // Each index entry is read as a pair of a split index and a tuple containing the key.
        List<Pair<Integer, Tuple>> index = new ArrayList<Pair<Integer, Tuple>>();

        for (Result res = ld.getNext(dummyTuple); res.returnStatus != POStatus.STATUS_EOP; res = ld.getNext(dummyTuple)) {
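The loop above is truncated; a hedged sketch of a typical body, assuming (illustratively) that each index tuple stores its split index in the last field:

            Tuple t = (Tuple) res.result;
            // Illustrative layout: key fields first, split index in the last field.
            int splitIdx = (Integer) t.get(t.size() - 1);
            index.add(new Pair<Integer, Tuple>(splitIdx, t));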

        // The join key: a single value, or the whole key list for a compound key.
        Object firstLeftKey = (keys.size() == 1 ? keys.get(0) : keys);
        POLoad ld = new POLoad(genKey(), new FileSpec(indexFile, new FuncSpec(indexFileLoadFuncSpec)));

        Properties props = ConfigurationUtil.getLocalFSProperties();
        PigContext pc = new PigContext(ExecType.LOCAL, props);
        ld.setPc(pc);
        // Queue up every index tuple in arrival order.
        index = new LinkedList<Tuple>();
        for (Result res = ld.getNext(dummyTuple); res.returnStatus != POStatus.STATUS_EOP; res = ld.getNext(dummyTuple)) {
            index.offer((Tuple) res.result);
        }

       
        initRightLoader(splitsAhead);
    }
   
    private void initRightLoader(int[] splitsToBeRead) throws IOException {
        // Rehydrate the PigContext that the front end serialized into the job configuration.
        PigContext pc = (PigContext) ObjectSerializer
                .deserialize(PigMapReduce.sJobConf.get("pig.pigContext"));

        Configuration conf = ConfigurationUtil.toConfiguration(pc.getProperties());

        // Hadoop security needs this property to be set.
        if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {
            conf.set("mapreduce.job.credentials.binary",
                    System.getenv("HADOOP_TOKEN_FILE_LOCATION"));
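The deserialize call above is one half of a round trip; a minimal sketch of both halves (the property key "pig.pigContext" is taken from the snippet, the rest is illustrative):

    // Front end: serialize the context into the job configuration.
    Configuration conf = new Configuration();
    PigContext pc = new PigContext(ExecType.LOCAL, new Properties());
    conf.set("pig.pigContext", ObjectSerializer.serialize(pc));

    // Task side (as above): read it back.
    PigContext restored =
            (PigContext) ObjectSerializer.deserialize(conf.get("pig.pigContext"));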

            POLoad ld = new POLoad(new OperatorKey("Repl File Loader", 1L),
                    replFile);

            Properties props = ConfigurationUtil.getLocalFSProperties();
            PigContext pc = new PigContext(ExecType.LOCAL, props);
            ld.setPc(pc);
            // We use the LocalRearrange operator to separate keys and values:
            // e.g. (a, b, c) would generate the key 'a' and the value (a, b, c).
            // The key 'a' indexes the HashMap, and the whole tuple (a, b, c)
            // is added to the HashMap as the value.
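The comments above describe the hash table's shape; a plain-Java sketch of that mapping, simplified to a List of matches rather than the operator's DataBag (assume tuple is the current input Tuple):

    // For input tuple (a, b, c): key = field 0, value = the whole tuple.
    Map<Object, List<Tuple>> hashTable = new HashMap<Object, List<Tuple>>();
    Object key = tuple.get(0);
    List<Tuple> matches = hashTable.get(key);
    if (matches == null) {
        matches = new ArrayList<Tuple>();
        hashTable.put(key, matches);
    }
    matches.add(tuple);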

        // All the needed variables
        Map<LogicalOperator, LogicalPlan> aliases = new HashMap<LogicalOperator, LogicalPlan>();
        Map<OperatorKey, LogicalOperator> opTable = new HashMap<OperatorKey, LogicalOperator>();
        Map<String, LogicalOperator> aliasOp = new HashMap<String, LogicalOperator>();
        Map<String, String> fileNameMap = new HashMap<String, String>();
        PigContext pigContext = new PigContext(ExecType.LOCAL, new Properties());
        pigContext.connect();

        String tempFile = this.prepareTempFile();

        // Start the real parsing job
        {

        }

        private void setUpHashTable() throws IOException {
            // Load the replicated input with PigStorage and build the join hash table from it.
            FileSpec replFile = new FileSpec(repl, new FuncSpec(PigStorage.class.getName() + "()"));
            POLoad ld = new POLoad(new OperatorKey("Repl File Loader", 1L), replFile);
            PigContext pc = new PigContext(ExecType.MAPREDUCE,
                    ConfigurationUtil.toProperties(PigMapReduce.sJobConf));
            try {
                pc.connect();

                ld.setPc(pc);
                Tuple dummyTuple = null;
                for (Result res = ld.getNext(dummyTuple); res.returnStatus != POStatus.STATUS_EOP; res = ld.getNext(dummyTuple)) {
                    Tuple tup = (Tuple) res.result;
                    // Use the load function's caster to decode the raw bytes.
                    LoadFunc lf = (LoadFunc) pc.instantiateFuncFromSpec(ld.getLFile().getFuncSpec());
                    String key = lf.getLoadCaster().bytesToCharArray(((DataByteArray) tup.get(keyField)).get());
                    Tuple csttup = TupleFactory.getInstance().newTuple(2);
                    csttup.set(0, key);
                    csttup.set(1, lf.getLoadCaster().bytesToInteger(((DataByteArray) tup.get(1)).get()));
                    DataBag vals = null;


