Package: org.apache.hadoop.chukwa

Examples of org.apache.hadoop.chukwa.ChunkBuilder


    return "10.10.182.49 [" + sdf.format(date) +
            "] \"\" 200 \"-\" \"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.5; en-US; rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3\" \"some.site.com:8076\"";
  }

  public void doTest(Date date, String recordData) {
    ChunkBuilder cb = new ChunkBuilder();
    cb.addRecord(recordData.getBytes());

    Chunk chunk = cb.getChunk();
    chunk.setDataType(DATA_TYPE);
    chunk.setSource(DATA_SOURCE);

    ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord> output =
            new ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord>();
View Full Code Here


  // Sample record payloads containing embedded "\n" record separators.
  // Consumed by testParse(), which escapes the internal separators via
  // RecordConstants.escapeAllButLastRecordSeparator before chunking.
  String[] data = { "dsjsjbsfjds\ndsafsfasd\n",
      "asdgHSAJGDGYDGGHAgd7364rt3478tc4\nhr473rt346t\n", "e  gqd  yeegyxuyexfg\n" };

  public void testParse() {

    ChunkBuilder cb = new ChunkBuilder();
    cb.addRecord(RecordConstants.escapeAllButLastRecordSeparator("\n", data[0])
        .getBytes());
    cb.addRecord(RecordConstants.escapeAllButLastRecordSeparator("\n", data[1])
        .getBytes());
    cb.addRecord(RecordConstants.escapeAllButLastRecordSeparator("\n", data[2])
        .getBytes());
    Chunk chunk = cb.getChunk();
    OutputCollector<ChukwaRecordKey, ChukwaRecord> output = new ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord>();
    TProcessor p = new TProcessor();
    p.data = data;
    p.process(null, chunk, output, null);
  }
View Full Code Here

      ChukwaWriter localWriter = new LocalWriter();
     
      List<Chunk> chunksSeqWriter = new LinkedList<Chunk>();
      List<Chunk> chunksLocalWriter = new LinkedList<Chunk>();
      for(int i=0;i<10;i++) {
        ChunkBuilder cb1 = new ChunkBuilder();
        cb1.addRecord(("record-" +i) .getBytes());
        cb1.addRecord("foo" .getBytes());
        cb1.addRecord("bar".getBytes());
        cb1.addRecord("baz".getBytes());
        chunksSeqWriter.add(cb1.getChunk());
       
        ChunkBuilder cb2 = new ChunkBuilder();
        cb2.addRecord(("record-" +i) .getBytes());
        cb2.addRecord("foo" .getBytes());
        cb2.addRecord("bar".getBytes());
        cb2.addRecord("baz".getBytes());
        chunksLocalWriter.add(cb2.getChunk());
       
      }
     
      File tempDir = new File(System.getProperty("test.build.data", "/tmp"));
      if (!tempDir.exists()) {
View Full Code Here

    JobConf conf = new JobConf();
    conf.set("chukwa.demux.mapper.default.processor",
             "org.apache.hadoop.chukwa.extraction.demux.processor.mapper.MockMapProcessor");
    mapper.configure(conf);

    ChunkBuilder cb = new ChunkBuilder();
    cb.addRecord(SAMPLE_RECORD_DATA.getBytes());
    ChunkImpl chunk = (ChunkImpl)cb.getChunk();

    ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord> output =
            new ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord>();

    mapper.map(new ChukwaArchiveKey(), chunk, output, Reporter.NULL);
View Full Code Here

      ChukwaWriter localWriter = new LocalWriter();
     
      List<Chunk> chunksSeqWriter = new LinkedList<Chunk>();
      List<Chunk> chunksLocalWriter = new LinkedList<Chunk>();
      for(int i=0;i<10;i++) {
        ChunkBuilder cb1 = new ChunkBuilder();
        cb1.addRecord(("record-" +i) .getBytes());
        cb1.addRecord("foo" .getBytes());
        cb1.addRecord("bar".getBytes());
        cb1.addRecord("baz".getBytes());
        chunksSeqWriter.add(cb1.getChunk());
       
        ChunkBuilder cb2 = new ChunkBuilder();
        cb2.addRecord(("record-" +i) .getBytes());
        cb2.addRecord("foo" .getBytes());
        cb2.addRecord("bar".getBytes());
        cb2.addRecord("baz".getBytes());
        chunksLocalWriter.add(cb2.getChunk());
       
      }
     
      File tempDir = new File(System.getProperty("test.build.data", "/tmp"));
      if (!tempDir.exists()) {
View Full Code Here

    }
   
     Tracer t = Tracer.startNewTracer("honu.server.processChunk");
    //this.counters.get(chunkCountField).incrementAndGet();
   
    ChunkBuilder cb = new ChunkBuilder();
    List<String> logEvents = tChunk.getLogEvents();
   
   
    for(String logEvent :logEvents) {
      cb.addRecord(logEvent.getBytes());
    }
   
    Chunk c = cb.getChunk();
    c.setApplication(tChunk.getApplication());
    c.setDataType(tChunk.getDataType());
    c.setSeqID(tChunk.getSeqId());
    c.setSource(tChunk.getSource());
    c.setTags(tChunk.getTags());
View Full Code Here

            SequenceFile.CompressionType.NONE, codec);
      }
     
     
      String str = null;
      ChunkBuilder cb = null;
    
      do
      {
        str = in.readLine();
       
        if (str != null) {
          lineCount ++;
          if (cb == null) {
            cb = new ChunkBuilder();
          }
          cb.addRecord(str.getBytes());
          if (lineCount%300 == 0) {
            append(seqFileWriter,getChunk(cb,dataType));
            cb = null;
          }
        }  
View Full Code Here

    JobConf conf = new JobConf();
    conf.set("chukwa.demux.mapper.default.processor",
             "org.apache.hadoop.chukwa.extraction.demux.processor.mapper.MockMapProcessor,");
    mapper.configure(conf);

    ChunkBuilder cb = new ChunkBuilder();
    cb.addRecord(SAMPLE_RECORD_DATA.getBytes());
    ChunkImpl chunk = (ChunkImpl)cb.getChunk();

    ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord> output =
            new ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord>();

    mapper.map(new ChukwaArchiveKey(), chunk, output, Reporter.NULL);
View Full Code Here

    JobConf conf = new JobConf();
    conf.set(custom_DataType,
            "org.apache.hadoop.chukwa.extraction.demux.processor.mapper.MockMapProcessor,");
    mapper.configure(conf);

    ChunkBuilder cb = new ChunkBuilder();
    cb.addRecord(SAMPLE_RECORD_DATA.getBytes());
    ChunkImpl chunk = (ChunkImpl)cb.getChunk();
    chunk.setDataType(custom_DataType);

    ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord> output =
            new ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord>();
View Full Code Here

      ChukwaWriter localWriter = new LocalWriter();
     
      List<Chunk> chunksSeqWriter = new LinkedList<Chunk>();
      List<Chunk> chunksLocalWriter = new LinkedList<Chunk>();
      for(int i=0;i<10;i++) {
        ChunkBuilder cb1 = new ChunkBuilder();
        cb1.addRecord(("record-" +i) .getBytes());
        cb1.addRecord("foo" .getBytes());
        cb1.addRecord("bar".getBytes());
        cb1.addRecord("baz".getBytes());
        chunksSeqWriter.add(cb1.getChunk());
       
        ChunkBuilder cb2 = new ChunkBuilder();
        cb2.addRecord(("record-" +i) .getBytes());
        cb2.addRecord("foo" .getBytes());
        cb2.addRecord("bar".getBytes());
        cb2.addRecord("baz".getBytes());
        chunksLocalWriter.add(cb2.getChunk());
       
      }
     
      File tempDir = new File(System.getProperty("test.build.data", "/tmp"));
      if (!tempDir.exists()) {
View Full Code Here

TOP

Related Classes of org.apache.hadoop.chukwa.ChunkBuilder

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.