Package com.cloudera.flume.handlers.hdfs

Examples of com.cloudera.flume.handlers.hdfs.WriteableEvent
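WriteableEvent wraps a Flume Event so it can be serialized with Hadoop's Writable machinery: new WriteableEvent(Event) plus toBytes() or write(DataOutput) on the writing side, and the no-arg constructor plus readFields(DataInput) on the reading side. A minimal round-trip sketch, using only the constructors and methods that appear in the snippets below (the helper method itself is illustrative, not part of the class):

  // Round-trip sketch: serialize an event to bytes, then read it back.
  WriteableEvent roundTrip(Event original) throws IOException {
    WriteableEvent we = new WriteableEvent(original);
    byte[] bytes = we.toBytes();                 // Writable fields as a byte array

    WriteableEvent copy = new WriteableEvent();  // no-arg constructor, then readFields()
    copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes)));
    return copy;
  }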



  @Override
  public void rawAppend(RawEvent evt) throws TException {
    try {
      WriteableEvent e = WriteableEvent.create(evt.getRaw());
      sink.append(e);
    } catch (Exception e) {
      // TODO figure out how to deal with different exns
      throw new TException("Caught exception " + e, e);
    }
View Full Code Here


    super(host, port);
  }

  @Override
  public void append(Event e) throws IOException {
    WriteableEvent we = new WriteableEvent(e);
    RawEvent re = new RawEvent(ByteBuffer.wrap(we.toBytes()));

    try {
      client.rawAppend(re);
      updateAppendStats(e);
    } catch (TException e1) {
View Full Code Here

    GunzipDecorator<EventSink> gunz = new GunzipDecorator<EventSink>(mem3);
    gunz.open();
    gunz.append(gzbe);
    Event gunze = mem3.next();

    int origsz = new WriteableEvent(be).toBytes().length;
    int gzipsz = new WriteableEvent(gzbe).toBytes().length;
    int ungzsz = new WriteableEvent(gunze).toBytes().length;

    LOG.info(String.format("before: %d  gzip: %d  gunzip: %d", origsz, gzipsz,
        ungzsz));

    assertTrue(origsz > gzipsz); // got some benefit for compressing?
View Full Code Here

      System.out.println(f.getName());
      fcount++;
      boolean hasNext = true;
      while (hasNext) {
        WriteableEventKey k = new WriteableEventKey();
        WriteableEvent e = new WriteableEvent();
        hasNext = reader.next(k, e);
        if (hasNext) {
          writer.append(k, e);
          count++;
        }
View Full Code Here

    b.mark("hdfs_fileopen_started");

    Event e = null;
    while ((e = mem.next()) != null) {
      // writing
      w.append(new WriteableEventKey(e), new WriteableEvent(e));
    }
    w.close();
    b.mark("seqfile_hdfs_write");

    hdfs.close();
View Full Code Here
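The benchmark above writes to a Hadoop SequenceFile.Writer (w) keyed by WriteableEventKey with WriteableEvent values. The snippet does not show how the writer is opened; a minimal sketch, assuming the standard SequenceFile API and a made-up output path:

    Configuration conf = new Configuration();
    FileSystem hdfs = FileSystem.get(conf);
    Path path = new Path("/tmp/flume-bench.seq");  // hypothetical path
    SequenceFile.Writer w = SequenceFile.createWriter(hdfs, conf, path,
        WriteableEventKey.class, WriteableEvent.class);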

    super(s);
  }

  @Override
  public void append(Event e) throws IOException, InterruptedException {
    WriteableEvent we = new WriteableEvent(e);
    byte[] bs = we.toBytes();
    eventSize.addAndGet(bs.length);

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    GZIPOutputStream gzos = new GZIPOutputStream(baos);
    gzos.write(bs);
View Full Code Here
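The append above is cut off right after gzos.write(bs). A hedged sketch of how the compression step presumably finishes: the gzip stream has to be closed (or finished) before the buffer is read, and the compressed bytes are then passed downstream. Whether they become the new event body, as assumed here, or an attribute is not visible in the snippet, and the gzipSize counter is inferred by analogy with eventSize:

    gzos.close();                          // flush the deflater before reading the buffer
    byte[] gz = baos.toByteArray();
    gzipSize.addAndGet(gz.length);         // assumed counter, mirroring eventSize above
    super.append(new EventImpl(gz));       // assumed: compressed bytes as the new event body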

  Event batchevent(List<Event> evts) throws IOException {
    // note: 2 >> 15 is 0, so the buffer starts empty; 2 << 15 (64 KiB) was likely intended
    ByteArrayOutputStream baos = new ByteArrayOutputStream(2 >> 15);
    DataOutput out = new DataOutputStream(baos);
    for (Event evt : evts) {
      WriteableEvent we = new WriteableEvent(evt);
      we.write(out);
    }

    Event be = new EventImpl(new byte[0]);
    ByteBuffer b = ByteBuffer.allocate(4);
    b.putInt(evts.size());
View Full Code Here
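batchevent() is cut off just after the event count is written into the 4-byte buffer. Judging from the unbatching code further down, which reads BatchingDecorator.BATCH_SIZE and BatchingDecorator.BATCH_DATA back out of the event, the method presumably finishes along these lines (a hedged sketch, not the verbatim source):

    be.set(BatchingDecorator.BATCH_SIZE, b.array());          // 4-byte count of batched events
    be.set(BatchingDecorator.BATCH_DATA, baos.toByteArray()); // concatenated WriteableEvent bytes
    return be;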

    gzipSize.addAndGet(bs.length);
    ByteArrayInputStream bais = new ByteArrayInputStream(bs);
    GZIPInputStreamExposeSize gzis = new GZIPInputStreamExposeSize(bais);
    DataInputStream dis = new DataInputStream(gzis);

    WriteableEvent out = new WriteableEvent();
    out.readFields(dis);
    long gunSz = gzis.getDecompressSize(); // must be done before closed.
    dis.close();
    super.append(out);
    gunzipSize.addAndGet(gunSz);
  }
View Full Code Here

    int sz = ByteBuffer.wrap(e.get(BatchingDecorator.BATCH_SIZE)).getInt();
    byte[] data = e.get(BatchingDecorator.BATCH_DATA);
    DataInput in = new DataInputStream(new ByteArrayInputStream(data));
    batchCnt.incrementAndGet();
    for (int i = 0; i < sz; i++) {
      WriteableEvent we = new WriteableEvent();
      we.readFields(in);
      super.append(we);
      unbatchedCnt.incrementAndGet();
    }
  }
View Full Code Here
