Package org.apache.flume.instrumentation

Examples of org.apache.flume.instrumentation.SinkCounter

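All of the fragments below come from Flume's own sinks (HBase, Avro/Thrift RPC, ElasticSearch, Kite Dataset, HDFS) and from the HDFS BucketWriter tests, and they share one idiom: the SinkCounter is created lazily in configure(), named after the sink, so a reconfiguration does not replace the counter and reset its metrics. For orientation, here is a minimal sketch of a complete custom sink wired to a SinkCounter. The sink class (LoggingSink) and its delivery step are hypothetical placeholders; the SinkCounter and lifecycle calls are the standard Flume APIs used in the fragments.

import org.apache.flume.Channel;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.Transaction;
import org.apache.flume.conf.Configurable;
import org.apache.flume.instrumentation.SinkCounter;
import org.apache.flume.sink.AbstractSink;

public class LoggingSink extends AbstractSink implements Configurable {

  private SinkCounter sinkCounter;

  @Override
  public void configure(Context context) {
    // Same lazy-instantiation idiom as the fragments below: create the counter
    // once, named after this sink instance, and keep it across reconfiguration.
    if (sinkCounter == null) {
      sinkCounter = new SinkCounter(getName());
    }
  }

  @Override
  public synchronized void start() {
    sinkCounter.start();                           // register with the monitoring subsystem
    sinkCounter.incrementConnectionCreatedCount();
    super.start();
  }

  @Override
  public synchronized void stop() {
    sinkCounter.incrementConnectionClosedCount();
    sinkCounter.stop();
    super.stop();
  }

  @Override
  public Status process() throws EventDeliveryException {
    Channel channel = getChannel();
    Transaction txn = channel.getTransaction();
    txn.begin();
    try {
      Event event = channel.take();
      if (event == null) {
        // Nothing in the channel: record the empty batch and back off.
        sinkCounter.incrementBatchEmptyCount();
        txn.commit();
        return Status.BACKOFF;
      }
      sinkCounter.incrementEventDrainAttemptCount();
      // ... hand the event to the downstream system here (placeholder) ...
      txn.commit();
      sinkCounter.incrementEventDrainSuccessCount();
      return Status.READY;
    } catch (Throwable t) {
      txn.rollback();
      throw new EventDeliveryException("Failed to deliver event", t);
    } finally {
      txn.close();
    }
  }
}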

      logger.error("Could not instantiate event serializer." , e);
      Throwables.propagate(e);
    }

    if (sinkCounter == null) {
      sinkCounter = new SinkCounter(this.getName());
    }
    timeout = context.getLong(HBaseSinkConfigurationConstants.CONFIG_TIMEOUT,
            HBaseSinkConfigurationConstants.DEFAULT_TIMEOUT);
    if (timeout <= 0) {
      logger.warn("Timeout should be positive for Hbase sink. "
View Full Code Here


          RpcClientConfigurationConstants.CONFIG_REQUEST_TIMEOUT,
          String.valueOf(requestTimeout));
    }

    if (sinkCounter == null) {
      sinkCounter = new SinkCounter(getName());
    }
  }
View Full Code Here

      logger.error("Could not instantiate event serializer.", e);
      Throwables.propagate(e);
    }

    if (sinkCounter == null) {
      sinkCounter = new SinkCounter(getName());
    }

    Preconditions.checkState(StringUtils.isNotBlank(indexName),
        "Missing Param:" + INDEX_NAME);
    Preconditions.checkState(StringUtils.isNotBlank(indexType),
View Full Code Here

            "must be > 0 and <= 24");
      }
    }

    if (sinkCounter == null) {
      sinkCounter = new SinkCounter(getName());
    }
  }
View Full Code Here

        DatasetSinkConstants.DEFAULT_BATCH_SIZE);
    this.rollIntervalS = context.getInteger(
        DatasetSinkConstants.CONFIG_KITE_ROLL_INTERVAL,
        DatasetSinkConstants.DEFAULT_ROLL_INTERVAL);

    this.counter = new SinkCounter(datasetName);
  }
View Full Code Here

      logger.error("Could not instantiate event serializer.", e);
      Throwables.propagate(e);
    }

    if (sinkCounter == null) {
      sinkCounter = new SinkCounter(getName());
    }

    String indexNameBuilderClass = DEFAULT_INDEX_NAME_BUILDER_CLASS;
    if (StringUtils.isNotBlank(context.getString(INDEX_NAME_BUILDER))) {
      indexNameBuilderClass = context.getString(INDEX_NAME_BUILDER);
    }

    Context indexnameBuilderContext = new Context();
    // Pass the index-name-builder's own sub-properties to its context.
    indexnameBuilderContext.putAll(
            context.getSubProperties(INDEX_NAME_BUILDER_PREFIX));

    try {
      @SuppressWarnings("unchecked")
      Class<? extends IndexNameBuilder> clazz
              = (Class<? extends IndexNameBuilder>) Class
              .forName(indexNameBuilderClass);
      indexNameBuilder = clazz.newInstance();
      indexnameBuilderContext.put(INDEX_NAME, indexName);
      indexNameBuilder.configure(indexnameBuilderContext);
    } catch (Exception e) {
      logger.error("Could not instantiate index name builder.", e);
      Throwables.propagate(e);
    }

    if (sinkCounter == null) {
      sinkCounter = new SinkCounter(getName());
    }

    Preconditions.checkState(StringUtils.isNotBlank(indexName),
        "Missing Param:" + INDEX_NAME);
    Preconditions.checkState(StringUtils.isNotBlank(indexType),
View Full Code Here

    for (Entry<String, String> entry: context.getParameters().entrySet()) {
      clientProps.setProperty(entry.getKey(), entry.getValue());
    }

    if (sinkCounter == null) {
      sinkCounter = new SinkCounter(getName());
    }
    cxnResetInterval = context.getInteger("reset-connection-interval",
      DEFAULT_CXN_RESET_INTERVAL);
    if (cxnResetInterval == DEFAULT_CXN_RESET_INTERVAL) {
      logger.info("Connection reset is set to " + String.valueOf
View Full Code Here

    MockHDFSWriter hdfsWriter = new MockHDFSWriter();
    HDFSTextSerializer formatter = new HDFSTextSerializer();
    BucketWriter bucketWriter = new BucketWriter(ROLL_INTERVAL, 0, 0,
      0, ctx, "/tmp", "file", PREFIX, ".tmp", null, null,
      SequenceFile.CompressionType.NONE, hdfsWriter,
      timedRollerPool, null, new SinkCounter(
        "test-bucket-writer-" + System.currentTimeMillis()), 0,
      null, null, 30000, Executors.newSingleThreadExecutor(), 0, 0);

    Event e = EventBuilder.withBody("foo", Charsets.UTF_8);
    bucketWriter.append(e);
View Full Code Here

    MockHDFSWriter hdfsWriter = new MockHDFSWriter();
    HDFSTextSerializer serializer = new HDFSTextSerializer();
    BucketWriter bucketWriter = new BucketWriter(ROLL_INTERVAL, 0, 0,
      0, ctx, "/tmp", "file", "", SUFFIX, null, null,
      SequenceFile.CompressionType.NONE, hdfsWriter,
      timedRollerPool, null, new SinkCounter(
        "test-bucket-writer-" + System.currentTimeMillis()), 0,
      null, null, 30000, Executors.newSingleThreadExecutor(), 0, 0);

    Event e = EventBuilder.withBody("foo", Charsets.UTF_8);
    bucketWriter.append(e);
View Full Code Here

    MockHDFSWriter hdfsWriter = new MockHDFSWriter();
    BucketWriter bucketWriter = new BucketWriter(ROLL_INTERVAL, 0, 0,
      0, ctx, "/tmp", "file", "", SUFFIX, null, null,
      SequenceFile.CompressionType.NONE,
      hdfsWriter, timedRollerPool, null,
      new SinkCounter(
        "test-bucket-writer-" + System.currentTimeMillis()), 0,
      new HDFSEventSink.WriterCallback() {
      @Override
      public void run(String filePath) {
        callbackCalled.set(true);
View Full Code Here

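The three test fragments above each hand BucketWriter a fresh SinkCounter whose name includes a timestamp so repeated runs do not collide. Outside of BucketWriter, the counter values can be read back directly through the getters on SinkCounter; a small self-contained sketch (class name and counter name are illustrative):

import org.apache.flume.instrumentation.SinkCounter;

public class SinkCounterSmokeTest {
  public static void main(String[] args) {
    // A unique name keeps repeated runs from clashing when the counter
    // registers itself for monitoring in the same JVM.
    SinkCounter counter = new SinkCounter("test-sink-" + System.currentTimeMillis());
    counter.start();

    counter.incrementEventDrainAttemptCount();
    counter.incrementEventDrainSuccessCount();
    counter.incrementBatchCompleteCount();

    // The getters expose the same values that the monitoring endpoints report.
    System.out.println("attempted: " + counter.getEventDrainAttemptCount());
    System.out.println("succeeded: " + counter.getEventDrainSuccessCount());
    System.out.println("batches:   " + counter.getBatchCompleteCount());

    counter.stop();
  }
}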