Examples of collect()


Examples of org.apache.cxf.javascript.NamespacePrefixAccumulator.collect()

        Map<String, String> nsPrefixMap =
            CastUtils.cast(context.get(ToolConstants.CFG_JSPREFIXMAP, Map.class),
                           String.class, String.class);

        if (nsPrefixMap != null) {
            for (Map.Entry<String, String> prefixEntry : nsPrefixMap.entrySet()) {
                prefixManager.collect(prefixEntry.getValue(), prefixEntry.getKey());
            }
        }

        BufferedWriter writer = null;
        try {
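
A minimal sketch of the same registration pattern, assuming an already-constructed NamespacePrefixAccumulator named prefixManager and a hypothetical URI-to-prefix map. Note that collect(prefix, uri) takes the prefix first, which is why the loop above passes getValue() before getKey():

    // Hypothetical map: namespace URI -> preferred prefix.
    Map<String, String> nsPrefixMap = new HashMap<String, String>();
    nsPrefixMap.put("http://example.com/types", "typ");

    // collect(prefix, uri): the map's value (the prefix) goes first.
    for (Map.Entry<String, String> e : nsPrefixMap.entrySet()) {
        prefixManager.collect(e.getValue(), e.getKey());
    }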

Examples of org.apache.flink.runtime.operators.shipping.RecordOutputCollector.collect()

              while (!this.taskCanceled && !inFormat.reachedEnd()) {
                // build next pair and ship pair if it is valid
                typedRecord.clear();
                Record returnedRecord = null;
                if ((returnedRecord = inFormat.nextRecord(typedRecord)) != null) {
                  output.collect(returnedRecord);
                }
              }
            } else if (this.output instanceof ChainedCollectorMapDriver) {
              // Record going to a chained map task
              @SuppressWarnings("unchecked")
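
The loop above is Flink's record-reuse read pattern: one mutable Record is cleared and refilled on each iteration, and only non-null results are shipped. A minimal sketch of that pattern from the legacy Record API, assuming an open InputFormat named inFormat and a Collector<Record> named output:

    Record reuse = new Record();
    while (!inFormat.reachedEnd()) {
        reuse.clear();                                // reset the reusable record
        Record returned = inFormat.nextRecord(reuse); // null means "skip this one"
        if (returned != null) {
            output.collect(returned);                 // ship the record downstream
        }
    }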

Examples of org.apache.hadoop.chukwa.datacollection.ChunkQueue.collect()

        }
        public void run() {
          try {
            ChunkQueue eventQueue = DataFactory.getInstance().getEventQueue();
            List<Chunk> evts = new ArrayList<Chunk>();
            eventQueue.collect(evts, 1);

            // Expected - {"CompositeType":"3","String":"TestString","StringArray":6,"Map":"3","Int":20}

            for (Chunk e : evts) {
              String data = new String(e.getData());
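
ChunkQueue.collect(list, n) is a blocking drain: the caller parks until chunks are queued, then receives them in the supplied list. A minimal consumer sketch, assuming a running agent behind DataFactory and that collect() may throw InterruptedException, as the enclosing try in the snippet above suggests; the payload printing is illustrative:

    ChunkQueue eventQueue = DataFactory.getInstance().getEventQueue();
    List<Chunk> chunks = new ArrayList<Chunk>();
    try {
        eventQueue.collect(chunks, 1);          // block until at least one chunk arrives
        for (Chunk c : chunks) {
            System.out.println(new String(c.getData()));
        }
    } catch (InterruptedException ie) {
        Thread.currentThread().interrupt();     // preserve the interrupt flag
    }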

Examples of org.apache.hadoop.chukwa.extraction.demux.processor.ChukwaOutputCollector.collect()

        log.info("Key ["+newkey+"] Task ["+start_rec.getUniqueID()+"] Job ["+start_rec.job_id+"] Friendly ["+start_rec.getFriendlyID()+"]");

        addStitchingFields(cr);
        log.debug(cr);
        coc.collect(new ChukwaRecordKey(key.getReduceType(), newkey), cr);
       
      } else if (itemcount == 1) {
        // check that we have only the start; if we have only the end, dump it
        // otherwise change the reducetype to get record written to file for
        // incomplete entries
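
A minimal sketch of emitting one record from a demux processor, assuming an initialized ChukwaOutputCollector named coc; the reduce type and field name here are illustrative:

    ChukwaRecord rec = new ChukwaRecord();
    rec.setTime(System.currentTimeMillis());
    rec.add("status", "OK");                    // illustrative field

    // Key = (reduce type, row key); collect() forwards the pair to the
    // wrapped Hadoop OutputCollector.
    coc.collect(new ChukwaRecordKey("SomeReduceType", "host-1/" + rec.getTime()), rec);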

Examples of org.apache.hadoop.mapred.OutputCollector.collect()

            allValues = cleanTuple( allValues );

            TupleRecord key = new TupleRecord( allValues );

            if( updateValues.equals( updateIfTuple ) )
                outputCollector.collect( key, null );
            else
                outputCollector.collect( key, key );

            return;
        }
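
OutputCollector.collect(key, value) is the emit hook of the classic org.apache.hadoop.mapred API; the snippet above uses a null value to signal a key-only update. For orientation, a self-contained mapper in that API looks like this (class and field names are illustrative):

    import java.io.IOException;
    import org.apache.hadoop.io.*;
    import org.apache.hadoop.mapred.*;

    public class UpperCaseMapper extends MapReduceBase
            implements Mapper<LongWritable, Text, Text, IntWritable> {
        public void map(LongWritable offset, Text line,
                        OutputCollector<Text, IntWritable> output, Reporter reporter)
                throws IOException {
            // Emit one (upper-cased line, 1) pair per input line.
            output.collect(new Text(line.toString().toUpperCase()), new IntWritable(1));
        }
    }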

Examples of org.apache.hadoop.mapred.OutputCollector.collect()

        ImmutableBytesWritable valueBytes = (ImmutableBytesWritable) tuple.getObject(j);
        put.add(Bytes.toBytes(familyNames[i]), Bytes.toBytes((String) fields.get(j)), valueBytes.get());
      }
    }

    outputCollector.collect(null, put);
  }

  @Override
  public void sinkConfInit(FlowProcess<JobConf> process,
      Tap<JobConf, RecordReader, OutputCollector> tap, JobConf conf) {
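
Here the key is deliberately null: HBase's TableOutputFormat ignores the key and persists only the Put value. A minimal sketch of building such a Put with the old HBase client API (Put.add was later superseded by addColumn); row, family, and qualifier are illustrative:

    Put put = new Put(Bytes.toBytes("row-1"));
    put.add(Bytes.toBytes("cf"),                // column family
            Bytes.toBytes("qual"),              // qualifier
            Bytes.toBytes("value"));            // cell value
    outputCollector.collect(null, put);         // key is ignored by the sink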

Examples of org.apache.hadoop.mapred.OutputCollector.collect()

            TupleRecord key = new TupleRecord( allValues );

            if( updateValues.equals( updateIfTuple ) )
                outputCollector.collect( key, null );
            else
                outputCollector.collect( key, key );

            return;
        }

        Tuple result = tupleEntry.selectTuple( getSinkFields() );

Examples of org.apache.hadoop.mapred.OutputCollector.collect()

        Tuple result = tupleEntry.selectTuple( getSinkFields() );

        result = cleanTuple( result );

        outputCollector.collect( new TupleRecord( result ), null );
    }

    /**
     * Provides a hook for subclasses to escape or modify any values before creating the final SQL statement.
     *
 

Examples of org.apache.lucene.facet.search.ScoredDocIdCollector.collect()

    ScoredDocIdCollector sdic = ScoredDocIdCollector.create(docs.length, false);
    assertTrue(
        "when scoring disabled, out-of-order collection should be supported",
        sdic.acceptsDocsOutOfOrder());
    for (int i = 0; i < docs.length; i++) {
      sdic.collect(docs[i]);
    }

    assertEquals("expected 3 documents but got " + sdic.getScoredDocIDs().size(), 3, sdic.getScoredDocIDs().size());
    ScoredDocIDsIterator iter = sdic.getScoredDocIDs().iterator();
    Arrays.sort(docs);
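
A minimal sketch of the same flow with Lucene's (since removed) facet ScoredDocIdCollector: create it sized to the reader's maxDoc with scoring disabled, feed it doc IDs, then iterate what was collected; maxDoc and the doc IDs are illustrative:

    int maxDoc = 10;                                              // illustrative
    ScoredDocIdCollector collector = ScoredDocIdCollector.create(maxDoc, false);
    for (int doc : new int[] {4, 1, 7}) {
        collector.collect(doc);                                   // order does not matter
    }
    ScoredDocIDsIterator it = collector.getScoredDocIDs().iterator();
    while (it.next()) {
        System.out.println("collected doc " + it.getDocID());
    }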

Examples of org.apache.lucene.search.CachingCollector.collect()

      CachingCollector cc = CachingCollector.create(new NoOpCollector(false), cacheScores, 1);
      cc.setScorer(new MockScorer());
     
      // collect 1000 docs
      for (int i = 0; i < 1000; i++) {
        cc.collect(i);
      }
     
      // now replay them
      cc.replay(new Collector() {
        int prevDocID = -1;
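
The cache-then-replay idiom above first records hits through a wrapped collector and later re-delivers them without re-running the query. A minimal sketch, assuming an IndexSearcher named searcher and a Query named query; TotalHitCountCollector stands in for the delegates:

    CachingCollector cached = CachingCollector.create(
        new TotalHitCountCollector(), false /* cacheScores */, 16.0 /* maxRAMMB */);
    searcher.search(query, cached);
    if (cached.isCached()) {           // replay only if everything fit in the RAM budget
        cached.replay(new TotalHitCountCollector());
    }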