Package co.cask.cdap.data2.datafabric.dataset.service.executor

Examples of co.cask.cdap.data2.datafabric.dataset.service.executor.RemoteDatasetOpExecutor


    private StreamProperty currentProperty;

    private StreamPropertyChangeListener(StreamAdmin streamAdmin, String streamName, StreamPropertyListener listener) {
      this.listener = listener;
      try {
        StreamConfig streamConfig = streamAdmin.getConfig(streamName);
        this.currentProperty = new StreamProperty(StreamUtils.getGeneration(streamConfig), streamConfig.getTTL());
      } catch (Exception e) {
        // It's ok if the stream config is not yet available (meaning no data has ever been written to the stream yet).
        this.currentProperty = new StreamProperty(0, Long.MAX_VALUE);
      }
    }
View Full Code Here
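When the stream config cannot be read yet, the constructor above falls back to generation 0 and an effectively unlimited TTL, so the first successful config read registers as a property change. A tiny sketch of that comparison with plain values (the numbers and the manual field comparison are illustrative, not CDAP types):

public class PropertyFallbackExample {
  public static void main(String[] args) {
    // Defaults used above when no config exists yet: generation 0, unbounded TTL.
    int defaultGeneration = 0;
    long defaultTtlMillis = Long.MAX_VALUE;

    // Values a freshly read StreamConfig might yield (illustrative numbers only).
    int generation = 3;
    long ttlMillis = 86_400_000L;

    boolean changed = generation != defaultGeneration || ttlMillis != defaultTtlMillis;
    System.out.println("property changed = " + changed); // prints: property changed = true
  }
}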


  public void getInfo(HttpRequest request, HttpResponder responder,
                      @PathParam("stream") String stream) throws Exception {
    String accountID = getAuthenticatedAccountId(request);

    if (streamMetaStore.streamExists(accountID, stream)) {
      StreamConfig streamConfig = streamAdmin.getConfig(stream);
      StreamProperties streamProperties = new StreamProperties(streamConfig.getName(), streamConfig.getTTL());
      responder.sendJson(HttpResponseStatus.OK, streamProperties, StreamProperties.class, GSON);
    } else {
      responder.sendStatus(HttpResponseStatus.NOT_FOUND);
    }
  }
View Full Code Here
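The getInfo handler above serializes a StreamProperties object to JSON with Gson. A hedged sketch of what that response body could look like, using a hypothetical local class; the field names are assumptions and the real CDAP StreamProperties may serialize differently:

import com.google.gson.Gson;

public class StreamInfoJsonExample {
  // Hypothetical mirror of the name/TTL pair returned by getInfo.
  static final class StreamInfo {
    final String name;
    final long ttl;

    StreamInfo(String name, long ttl) {
      this.name = name;
      this.ttl = ttl;
    }
  }

  public static void main(String[] args) {
    // TTL is kept in milliseconds in the stream config.
    System.out.println(new Gson().toJson(new StreamInfo("purchases", 86_400_000L)));
    // Example output: {"name":"purchases","ttl":86400000}
  }
}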

      responder.sendString(HttpResponseStatus.NOT_FOUND, "Stream does not exist");
      return;
    }

    try {
      StreamConfig config = streamAdmin.getConfig(stream);
      try {
        config = getConfigUpdate(request, config);
        if (config.getTTL() < 0) {
          responder.sendString(HttpResponseStatus.BAD_REQUEST, "TTL value should be positive");
          return;
        }
      } catch (Throwable t) {
        responder.sendString(HttpResponseStatus.BAD_REQUEST, "Invalid stream configuration");
View Full Code Here
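The update handler above parses a JSON request body and rejects negative TTL values. A hedged sketch of building such a body on the client side; the "ttl" key in seconds matches the parsing shown in the next snippet, while the surrounding code is illustrative only:

import com.google.gson.JsonObject;

public class TtlUpdateBodyExample {
  public static void main(String[] args) {
    // The REST API takes the TTL in seconds; the server converts it to milliseconds.
    JsonObject body = new JsonObject();
    body.addProperty("ttl", 86400L); // one day
    System.out.println(body);        // {"ttl":86400}
  }
}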

    // Only pickup changes in TTL
    if (json.has("ttl")) {
      JsonElement ttl = json.get("ttl");
      if (ttl.isJsonPrimitive()) {
        // TTL in the REST API is in seconds. Convert it to ms for the config.
        return new StreamConfig(config.getName(), config.getPartitionDuration(), config.getIndexInterval(),
                                TimeUnit.SECONDS.toMillis(ttl.getAsLong()), config.getLocation());
      }
    }
    return config;
  }
View Full Code Here
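The conversion above is the only property the update picks up: a "ttl" value given in seconds becomes milliseconds in the new StreamConfig. A standalone sketch of that parsing and conversion step with Gson (only the shape of the JSON body is assumed):

import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import java.util.concurrent.TimeUnit;

public class TtlConversionExample {
  public static void main(String[] args) {
    JsonObject json = new JsonParser().parse("{\"ttl\": 86400}").getAsJsonObject();
    if (json.has("ttl")) {
      JsonElement ttl = json.get("ttl");
      if (ttl.isJsonPrimitive()) {
        // Same conversion as above: REST API seconds -> config milliseconds.
        long ttlMillis = TimeUnit.SECONDS.toMillis(ttl.getAsLong());
        System.out.println(ttlMillis); // 86400000
      }
    }
  }
}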

                                             new DataFabricModules().getDistributedModules(),
                                             new DataSetsModules().getDistributedModule(),
                                             new LocationRuntimeModule().getDistributedModules());

    StreamAdmin streamAdmin = injector.getInstance(StreamAdmin.class);
    StreamConfig streamConfig = streamAdmin.getConfig(streamName);
    Location streamLocation = streamConfig.getLocation();
    List<Location> eventFiles = Lists.newArrayList();

    for (Location partition : streamLocation.list()) {
      if (!partition.isDirectory()) {
        continue;
View Full Code Here
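The loop above walks the stream's base Location and skips entries that are not partition directories. A hedged sketch of the same traversal, assuming the Apache Twill Location API that CDAP uses; only list() and isDirectory() are taken from the snippet, the rest is illustrative:

import com.google.common.collect.Lists;
import java.io.IOException;
import java.util.List;
import org.apache.twill.filesystem.Location;

public class PartitionListingExample {
  // Collects the partition directories found under a stream's base location.
  static List<Location> listPartitions(Location streamLocation) throws IOException {
    List<Location> partitions = Lists.newArrayList();
    for (Location partition : streamLocation.list()) {
      if (!partition.isDirectory()) {
        continue; // skip plain files at the top level
      }
      partitions.add(partition);
    }
    return partitions;
  }
}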

    Supplier<FileWriter<StreamEvent>> create(final String streamName) {
      return new Supplier<FileWriter<StreamEvent>>() {
        @Override
        public FileWriter<StreamEvent> get() {
          try {
            StreamConfig streamConfig = streamAdmin.getConfig(streamName);
            Integer generation = generations.get(streamName);
            if (generation == null) {
              generation = StreamUtils.getGeneration(streamConfig);
            }
View Full Code Here
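The factory above builds a fresh Supplier per stream, and each get() call resolves the current config and generation. Callers that want that lookup done only once can wrap such a supplier with Guava's Suppliers.memoize; a small sketch with a generic payload instead of the CDAP FileWriter type:

import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;

public class MemoizedSupplierExample {
  public static void main(String[] args) {
    Supplier<String> expensive = new Supplier<String>() {
      @Override
      public String get() {
        System.out.println("resolving stream config...");
        return "writer-for-generation-1";
      }
    };
    // memoize() caches the first result, so the lookup above runs only once.
    Supplier<String> cached = Suppliers.memoize(expensive);
    System.out.println(cached.get());
    System.out.println(cached.get());
  }
}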

    if (!verifyGetEventsRequest(accountID, stream, startTime, endTime, limit, responder)) {
      return;
    }

    StreamConfig streamConfig = streamAdmin.getConfig(stream);
    startTime = Math.max(startTime, System.currentTimeMillis() - streamConfig.getTTL());

    // Create the stream event reader
    FileReader<StreamEventOffset, Iterable<StreamFileOffset>> reader = createReader(streamConfig, startTime);
    try {
      ReadFilter readFilter = createReadFilter(startTime, endTime);
View Full Code Here
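Before creating the reader, the handler clamps the requested start time so it never reaches past the stream's TTL window. A tiny sketch of that arithmetic with illustrative values:

public class StartTimeClampExample {
  public static void main(String[] args) {
    long ttlMillis = 86_400_000L;  // one day, as stored in the config
    long requestedStart = 0L;      // caller asked for "from the beginning"
    long now = System.currentTimeMillis();
    // Same clamp as above: nothing older than (now - TTL) is still readable.
    long effectiveStart = Math.max(requestedStart, now - ttlMillis);
    System.out.println("reading events from " + effectiveStart);
  }
}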

  }

  @Override
  public StreamConsumer createStreamConsumer(QueueName streamName, ConsumerConfig consumerConfig) throws IOException {
    String namespace = String.format("%s.%s", programId.getApplicationId(), programId.getId());
    final StreamConsumer consumer = streamConsumerFactory.create(streamName, namespace, consumerConfig);

    dataSetContext.addTransactionAware(consumer);
    return new ForwardingStreamConsumer(consumer) {
      @Override
      public void close() throws IOException {
View Full Code Here
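createStreamConsumer registers the consumer as a transaction-aware and then wraps it in a ForwardingStreamConsumer so close() can be intercepted. A minimal sketch of that forwarding (decorator) idea with a plain Closeable instead of the CDAP consumer types:

import java.io.Closeable;
import java.io.IOException;

public class ForwardingCloseableExample {
  // Forwards to the delegate but adds extra work on close(),
  // mirroring the anonymous ForwardingStreamConsumer above.
  static Closeable wrap(final Closeable delegate) {
    return new Closeable() {
      @Override
      public void close() throws IOException {
        try {
          delegate.close();
        } finally {
          System.out.println("consumer closed, releasing bookkeeping");
        }
      }
    };
  }
}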

    this.eventTransform = eventTransform;
  }

  @Override
  public InputDatum<T> dequeue(long timeout, TimeUnit timeoutUnit) throws IOException, InterruptedException {
    StreamConsumer consumer = consumerSupplier.get();
    return new BasicInputDatum<StreamEvent, T>(consumer.getStreamName(),
                                               consumer.poll(batchSize, timeout, timeoutUnit), eventTransform);
  }
View Full Code Here
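dequeue simply polls the underlying stream consumer with the caller's timeout and wraps the result in a BasicInputDatum. The poll(batchSize, timeout, unit) signature is only used, not defined, here, so the sketch below shows the same timed-poll pattern on a standard BlockingQueue as an analogy:

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

public class TimedPollExample {
  public static void main(String[] args) throws InterruptedException {
    BlockingQueue<String> queue = new LinkedBlockingQueue<String>();
    queue.offer("event-1");
    // Wait up to 2 seconds for an element, mirroring the timeout/timeoutUnit pair above.
    String event = queue.poll(2, TimeUnit.SECONDS);
    System.out.println(event); // event-1, or null if nothing arrived in time
  }
}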

      if (queueName.isQueue()) {
        QueueConsumer queueConsumer = dataFabricFacade.createConsumer(queueName, config, numGroups);
        consumerConfig = queueConsumer.getConfig();
        consumer = queueConsumer;
      } else {
        StreamConsumer streamConsumer = dataFabricFacade.createStreamConsumer(queueName, config);
        consumerConfig = streamConsumer.getConsumerConfig();
        consumer = streamConsumer;
      }
    } catch (Exception e) {
      throw Throwables.propagate(e);
    }
View Full Code Here
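The branch above picks a queue consumer or a stream consumer based on the QueueName and rethrows any checked exception via Guava's Throwables.propagate. A short sketch of that rethrow idiom on its own (the failing method is made up for illustration):

import com.google.common.base.Throwables;
import java.io.IOException;

public class PropagateExample {
  static String openResource() {
    try {
      return mightFail();
    } catch (Exception e) {
      // Wraps checked exceptions in a RuntimeException, as in the snippet above.
      throw Throwables.propagate(e);
    }
  }

  static String mightFail() throws IOException {
    throw new IOException("resource not available");
  }
}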
