Package org.apache.hadoop.chukwa

Examples of org.apache.hadoop.chukwa.ChunkImpl
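Every snippet below builds chunks with the same five-argument constructor:
ChunkImpl(dataType, streamName, seqId, data, adaptor). A minimal sketch of a
standalone construction, assuming nothing beyond that constructor (the data
type and stream name are made-up values; the adaptor argument may be null
when no adaptor produced the chunk):

  byte[] data = "a log line\n".getBytes();
  // By convention, seqId is the byte offset of the end of this payload
  // within its stream, as the adaptor examples below illustrate.
  ChunkImpl chunk = new ChunkImpl("MyDataType", "/var/log/app.log",
      data.length, data, null);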


  // Wrap the whole buffer in a single chunk and hand it to the receiver.
  // The sequence ID is the file offset of the last byte of this buffer.
  protected int extractRecords(ChunkReceiver eq, long buffOffsetInFile,
      byte[] buf) throws InterruptedException {
    if (buf.length == 0)
      return 0;

    ChunkImpl chunk = new ChunkImpl(type, toWatch.getAbsolutePath(),
        buffOffsetInFile + buf.length, buf, this);

    eq.add(chunk);
    return buf.length;
  }
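The ChunkReceiver consumed by extractRecords is a small interface; a
hypothetical collecting stub for unit-testing adaptors like the one above,
assuming only the add() method these examples call (the throws clause
mirrors extractRecords' own signature):

  class CollectingReceiver implements ChunkReceiver {
    final List<Chunk> received = new ArrayList<Chunk>();
    public void add(Chunk event) throws InterruptedException {
      received.add(event); // keep every chunk for later assertions
    }
  }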


      // Send everything up to and including the last newline as one chunk,
      // recording that newline's offset as a record boundary.
      int[] offsets_i = { lastNewLineOffset };

      // the character at the last offset itself consumes one byte
      int bytesUsed = lastNewLineOffset + 1;
      assert bytesUsed > 0 : " shouldn't send empty events";
      ChunkImpl event = new ChunkImpl(type, toWatch.getAbsolutePath(),
          buffOffsetInFile + bytesUsed, Arrays.copyOf(buf, bytesUsed), this);

      event.setRecordOffsets(offsets_i);
      eq.add(event);

      return bytesUsed;
    } else
      return 0;

      }
      json.put("disk", fsList);
      json.put("timestamp", System.currentTimeMillis());
      byte[] data = json.toString().getBytes();
      sendOffset += data.length;
      // Package the JSON metrics snapshot as a chunk; the running byte
      // offset sendOffset doubles as the sequence ID.
      ChunkImpl c = new ChunkImpl("SystemMetrics", "Sigar", sendOffset, data, systemMetrics);
      if (!skip) {
        receiver.add(c);
      }
    } catch (Exception se) {
      log.error(ExceptionUtil.getStackTrace(se));

    // Read chunks back out of a Chukwa archive SequenceFile. The key and
    // chunk objects are allocated once and reused across reader.next() calls.
    SequenceFile.Reader reader = null;
    try {
      reader = new SequenceFile.Reader(fs, new Path(file), conf);

      ChukwaArchiveKey key = new ChukwaArchiveKey();
      ChunkImpl chunk = ChunkImpl.getBlankChunk();

      StringBuilder sb = new StringBuilder();
      while (reader.next(key, chunk)) {
        sb.append("\nTimePartition: " + key.getTimePartition());
        sb.append("\nDataType: " + key.getDataType());
        sb.append("\nStreamName: " + key.getStreamName());
        sb.append("\nSeqId: " + key.getSeqId());
        sb.append("\n\t\t =============== ");

        sb.append("\nCluster: " + chunk.getTags());
        sb.append("\nDataType: " + chunk.getDataType());
        sb.append("\nSource: " + chunk.getSource());
        sb.append("\nApplication: " + chunk.getStreamName());
        sb.append("\nSeqID: " + chunk.getSeqID());
        sb.append("\nData: " + new String(chunk.getData()));
        return sb.toString(); // dump only the first record
      }
    } catch (Throwable e) {
      Assert.fail("Exception while reading SeqFile: " + e.getMessage());
      throw e;
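The write side is symmetric; a sketch of producing such an archive file,
assuming ChukwaArchiveKey exposes setters matching the getters in the
reader loop above (field names are taken from that loop):

  SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf,
      new Path(file), ChukwaArchiveKey.class, ChunkImpl.class);
  ChukwaArchiveKey key = new ChukwaArchiveKey();
  key.setTimePartition(System.currentTimeMillis());
  key.setDataType(chunk.getDataType());
  key.setStreamName(chunk.getStreamName());
  key.setSeqId(chunk.getSeqID());
  writer.append(key, chunk); // both key and chunk are Writables
  writer.close();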

   
  }
 
  public void testWriters() {
    ArrayList<Chunk> chunks = new ArrayList<Chunk>();
    chunks.add(new ChunkImpl("TextParser", "name", timestamp, test, null));
    try {
      // Point demux at the test parser package, then write through HBase.
      cc.set("hbase.demux.package", "org.apache.hadoop.chukwa.datacollection.writer.test.demux");
      cc.set("TextParser", "org.apache.hadoop.chukwa.datacollection.writer.test.demux.TextParser");
      conf.set(HConstants.ZOOKEEPER_QUORUM, "127.0.0.1");
      hbw = new HBaseWriter(cc, conf);
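In the full test, the chunk list built at the top is what goes through the
writer; a sketch of the remaining calls, assuming HBaseWriter follows the
ChukwaWriter contract of add(List<Chunk>) plus close():

  hbw.add(chunks); // demux each chunk and write the records into HBase
  hbw.close();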

    json.put("maps_killed", "3");
    json.put("waiting_maps", "1");
    json.put("RpcProcessingTime_avg_time", "0.003");
    byte[] data = json.toString().getBytes();
    JobTrackerProcessor p = new JobTrackerProcessor();
    ChunkImpl ch = new ChunkImpl("TestType", "Test", data.length, data,
        null);
    String failMsg = testProcessor(p, json, ch);
    assertNull(failMsg, failMsg);

    // Test a gauge metric: feed a second sample (5), then set the expected
    // value to the delta from the first sample (5 - 3 = 2).
    json.put("maps_killed", "5");
    data = json.toString().getBytes();
    ch = new ChunkImpl("TestType", "Test", data.length, data, null);
    json.put("maps_killed", "2");
    failMsg = testProcessor(p, json, ch);
    assertNull(failMsg, failMsg);
  }
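The gauge pattern above is worth spelling out: for gauge metrics the
processor is expected to emit the change between consecutive samples, not
the raw counter. A minimal sketch of that arithmetic (plain Java, not
Chukwa's implementation):

  long previous = 3;               // maps_killed in the first chunk
  long current = 5;                // maps_killed in the second chunk
  long gauge = current - previous; // 2, the value the test expects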

    json.put("RpcQueueTime_avg_time", "0.001");
    json.put("gcCount", "112");
    json.put("Transactions_num_ops", "3816");
    byte[] data = json.toString().getBytes();
    NamenodeProcessor p = new NamenodeProcessor();
    ChunkImpl ch = new ChunkImpl("TestType", "Test", data.length, data,
        null);
    String failMsg = testProcessor(p, json, ch);
    assertNull(failMsg, failMsg);

    // Test gauge metrics: expected values are deltas from the previous
    // sample (e.g. gcCount 115 - 112 = 3).
    json.put("FilesCreated", "55");
    json.put("gcCount", "115");
    data = json.toString().getBytes();
    ch = new ChunkImpl("TestType", "Test", data.length, data, null);
    json.put("FilesCreated", "22");
    json.put("gcCount", "3");
    failMsg = testProcessor(p, json, ch);
    assertNull(failMsg, failMsg);
  }

    json.put("RpcQueueTime_avg_time", "0.001");
    json.put("gcCount", "112");
    json.put("Capacity", "22926269645");
    byte[] data = json.toString().getBytes();
    DatanodeProcessor p = new DatanodeProcessor();
    ChunkImpl ch = new ChunkImpl("TestType", "Test", data.length, data,
        null);
    String failMsg = testProcessor(p, json, ch);
    assertNull(failMsg, failMsg);

    // Test gauge metrics: expected values are deltas from the previous
    // sample (e.g. gcCount 115 - 112 = 3).
    json.put("heartBeats_num_ops", "10980");
    json.put("gcCount", "115");
    data = json.toString().getBytes();
    ch = new ChunkImpl("TestType", "Test", data.length, data, null);
    json.put("heartBeats_num_ops", "105");
    json.put("gcCount", "3");
    failMsg = testProcessor(p, json, ch);
    assertNull(failMsg, failMsg);
  }

    JSONObject json = getJSONObject();
    json.put("splitSizeNumOps", "108");
    json.put("AverageLoad", "3.33");
    byte[] data = json.toString().getBytes();
    HBaseMasterProcessor p = new HBaseMasterProcessor();
    ChunkImpl ch = new ChunkImpl("TestType", "Test", data.length, data,
        null);
    String failMsg = testProcessor(p, json, ch);
    assertNull(failMsg, failMsg);

    // Test a gauge metric: expected value is the delta from the previous
    // sample (splitSizeNumOps 109 - 108 = 1).
    json.put("splitSizeNumOps", "109");
    data = json.toString().getBytes();
    ch = new ChunkImpl("TestType", "Test", data.length, data, null);
    json.put("splitSizeNumOps", "1");
    failMsg = testProcessor(p, json, ch);
    assertNull(failMsg, failMsg);
  }

    // test metric for each record type
    JSONObject json = getJSONObject();
    json.put("blockCacheSize", "2681872");
    byte[] data = json.toString().getBytes();
    HBaseMasterProcessor p = new HBaseMasterProcessor();
    ChunkImpl ch = new ChunkImpl("TestType", "Test", data.length, data,
        null);
    String failMsg = testProcessor(p, json, ch);
    assertNull(failMsg, failMsg);
    // no gauge metrics yet
  }
