Package org.apache.hadoop.chukwa

Examples of org.apache.hadoop.chukwa.ChunkImpl
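
The snippets below are excerpted from Chukwa adaptors, collectors, and tests that create or consume ChunkImpl objects. As a quick orientation, here is a minimal, hypothetical sketch of how a ChunkImpl is typically built and inspected. It uses only the constructor and methods that appear in the excerpts below; the data type, stream name, host, and tag values are invented for illustration.

import org.apache.hadoop.chukwa.ChunkImpl;

public class ChunkImplSketch {
  public static void main(String[] args) {
    byte[] data = "example payload".getBytes();

    // ChunkImpl(dataType, streamName, sequenceID, data, adaptor): the sequence ID
    // is the byte offset of the end of this data within its stream, and the
    // adaptor may be null when no adaptor produced the chunk, as in the tests below.
    ChunkImpl chunk = new ChunkImpl("ExampleType", "example-stream", data.length, data, null);

    chunk.setSource("somehost.example.com");          // host the chunk came from
    chunk.addTag("cluster=\"demo\"");                 // tags are key="value" pairs

    System.out.println(new String(chunk.getData()));  // prints: example payload
  }
}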


  /**
   * Wrap the contents of the buffer in a chunk and pass it to the receiver.
   *
   * @param eq the queue to place the newly created chunk on
   * @param buffOffsetInFile the byte offset in the watched file at which buf begins
   * @param buf the bytes to package into a chunk
   * @return the number of bytes processed
   * @throws InterruptedException
   */
  protected int extractRecords(ChunkReceiver eq, long buffOffsetInFile,
      byte[] buf) throws InterruptedException {
    // the chunk's sequence ID is the offset of the last byte of buf in the file
    ChunkImpl chunk = new ChunkImpl(type, toWatch.getAbsolutePath(),
        buffOffsetInFile + buf.length, buf, this);

    eq.add(chunk);
    return buf.length;
  }
View Full Code Here


    // open the completed (.done) sequence file of archived chunks for reading
    SequenceFile.Reader r = new SequenceFile.Reader(FileSystem.getLocal(conf),
        new Path(directory + fileName + ".done"), conf);

    File outputFile = new File(directory + fileName + ".raw");

    ChukwaArchiveKey key = new ChukwaArchiveKey();
    ChunkImpl chunk = ChunkImpl.getBlankChunk();
    FileWriter out = new FileWriter(outputFile);
    try {
      // append each chunk's payload to the .raw output file
      while (r.next(key, chunk)) {
        out.write(new String(chunk.getData()));
      }
    } finally {
      out.close();
      r.close();
    }
View Full Code Here

    collectors.add("http://somehost.invalid/chukwa");
    send.setCollectors(new RetryListOfCollectors(collectors, 1000));
   
    byte[] data = "sometestdata".getBytes();
    Adaptor a = new FileTailingAdaptor();
    ChunkImpl ci = new ChunkImpl("testtype", "sname", data.length, data, a);
    ArrayList<Chunk> toSend = new ArrayList<Chunk>();
    toSend.add(ci);
    try {
      // somehost.invalid cannot be reached, so no chunks should be committed
      List<CommitListEntry> resp = send.send(toSend);
      assertTrue(resp.size() == 0);
View Full Code Here

    Configuration conf = new Configuration();
    Path path = new Path(tmpFile.getAbsolutePath());
    List<ChunkImpl> chunks = new ArrayList<ChunkImpl>();
    byte[] dat = "test".getBytes();
   
    ChunkImpl c = new ChunkImpl("Data", "aname", dat.length, dat, null);
    chunks.add(c);
   
    dat = "ing".getBytes();
    c = new ChunkImpl("Data", "aname", dat.length+4, dat, null);
    chunks.add(c);
   
    writeSeqFile(conf, FileSystem.getLocal(conf), path, chunks);
   
    String[] args = new String[] {"datatype=Data",path.toString()};
View Full Code Here

  public void testBasicPatternMatching()  {
   try {
     Filter rules = new Filter("host=foo.*&cluster=bar&datatype=Data");
     assertEquals(3, rules.size());
     byte[] dat = "someText".getBytes();
     ChunkImpl chunkNone = new ChunkImpl("badData","aname", dat.length, dat, null);
     assertFalse(rules.matches(chunkNone));
     assertTrue(Filter.ALL.matches(chunkNone));


     // do the right thing on a partial match: host and cluster match, but the
     // datatype does not, so the filter must still reject the chunk
     ChunkImpl chunkSome = new ChunkImpl("badData", "aname", dat.length, dat, null);
     chunkSome.setSource("fooly");
     chunkSome.addTag("cluster=\"bar\"");
     assertFalse(rules.matches(chunkSome));
     assertTrue(Filter.ALL.matches(chunkSome));

     ChunkImpl chunkAll = new ChunkImpl("Data", "aname", dat.length, dat, null);
     chunkAll.setSource("fooly");
     chunkAll.addTag("cluster=\"bar\"");

     assertTrue(rules.matches(chunkAll));
     assertTrue(Filter.ALL.matches(chunkAll));

    
View Full Code Here

             "org.apache.hadoop.chukwa.extraction.demux.processor.mapper.MockMapProcessor,");
    mapper.configure(conf);

    // build a chunk containing a single record to feed through the mapper
    ChunkBuilder cb = new ChunkBuilder();
    cb.addRecord(SAMPLE_RECORD_DATA.getBytes());
    ChunkImpl chunk = (ChunkImpl) cb.getChunk();

    ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord> output =
            new ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord>();

    mapper.map(new ChukwaArchiveKey(), chunk, output, Reporter.NULL);
View Full Code Here

            "org.apache.hadoop.chukwa.extraction.demux.processor.mapper.MockMapProcessor,");
    mapper.configure(conf);

    ChunkBuilder cb = new ChunkBuilder();
    cb.addRecord(SAMPLE_RECORD_DATA.getBytes());
    ChunkImpl chunk = (ChunkImpl) cb.getChunk();
    // give the chunk a custom data type before handing it to the mapper
    chunk.setDataType(custom_DataType);

    ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord> output =
            new ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord>();

    mapper.map(new ChukwaArchiveKey(), chunk, output, Reporter.NULL);
View Full Code Here

   }
  }
 
  public void testClusterPatterns() {
    byte[] dat = "someText".getBytes();
    ChunkImpl chunk1 = new ChunkImpl("Data", "aname", dat.length, dat, null);
    chunk1.setSource("asource");
    assertTrue(Filter.ALL.matches(chunk1));
    Filter rule = null;
    try {
      rule = new Filter("tags.foo=bar");
    } catch (CheckedPatternSyntaxException e) {
      e.printStackTrace();
      fail("Regular expression error: " + e);
    }
   
    assertFalse(rule.matches(chunk1));   // no foo tag yet
    chunk1.addTag("foo=\"bar\"");
    assertTrue(rule.matches(chunk1));    // tags.foo now matches "bar"
    chunk1.addTag("baz=\"quux\"");
    assertTrue(rule.matches(chunk1));    // unrelated tags do not break the match
    assertTrue(Filter.ALL.matches(chunk1));
  }
View Full Code Here

      List<Chunk> events = new LinkedList<Chunk>();
      StringBuilder sb = new StringBuilder();

      for (int i = 0; i < numEvents; i++) {
        // deserialize the next chunk from the incoming DataInput stream
        ChunkImpl logEvent = ChunkImpl.read(di);
        events.add(logEvent);

        if (FANCY_DIAGNOSTICS) {
          diagnosticPage.sawChunk(logEvent, i);
        }
View Full Code Here

          }
        }
       
        // wrap the JSON snapshot in a chunk; the sequence ID advances by the
        // number of bytes sent so far, and the chunk is tagged with a UTC timestamp
        byte[] data = json.toString().getBytes();
        sendOffset += data.length;
        ChunkImpl c = new ChunkImpl(type, "JMX", sendOffset, data, adaptor);
        long rightNow = Calendar.getInstance(TimeZone.getTimeZone("UTC")).getTimeInMillis();
        c.addTag("timeStamp=\"" + rightNow + "\"");
        receiver.add(c);
      }
      catch (ConnectException e1) {
        log.error("Got connect exception for the existing MBeanServerConnection");
        log.error(ExceptionUtil.getStackTrace(e1));
View Full Code Here
