Package co.cask.cdap.api.mapreduce

Code examples for co.cask.cdap.api.mapreduce.MapReduceSpecification$Builder$NameSetter, drawn from the CDAP (Cask Data Application Platform) codebase.
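The excerpts below show how MapReduceSpecification instances are built and consumed. First, a minimal sketch of the staged builder the NameSetter belongs to; the with()/setName()/setDescription()/build() chain is assumed from the nested class name rather than confirmed by the excerpts, and WordCountJob is a hypothetical job:

import co.cask.cdap.api.mapreduce.MapReduce;
import co.cask.cdap.api.mapreduce.MapReduceContext;
import co.cask.cdap.api.mapreduce.MapReduceSpecification;

public class WordCountJob implements MapReduce {

  @Override
  public MapReduceSpecification configure() {
    // with() is assumed to return the NameSetter stage; each setter then
    // advances the builder to the next stage until build() produces the spec.
    return MapReduceSpecification.Builder.with()
      .setName("WordCountJob")
      .setDescription("Counts words in the input dataset")
      .build();
  }

  @Override
  public void beforeSubmit(MapReduceContext context) throws Exception {
    // no-op in this sketch
  }

  @Override
  public void onFinish(boolean succeeded, MapReduceContext context) throws Exception {
    // no-op in this sketch
  }
}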



  @Override
  public void addMapReduce(MapReduce mapReduce) {
    Preconditions.checkArgument(mapReduce != null, "MapReduce cannot be null.");
    MapReduceSpecification spec = new DefaultMapReduceSpecification(mapReduce);
    mapReduces.put(spec.getName(), spec);
  }
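For context, a hedged sketch of the application side that would drive the addMapReduce(...) call above, assuming CDAP's AbstractApplication configurer API; AnalyticsApp is hypothetical and WordCountJob is the sketch from the top of the page:

import co.cask.cdap.api.app.AbstractApplication;

public class AnalyticsApp extends AbstractApplication {

  @Override
  public void configure() {
    setName("AnalyticsApp");
    setDescription("Runs the word-count MapReduce job");
    // Ends up in the mapReduces map above, keyed by the spec's name.
    addMapReduce(new WordCountJob());
  }
}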


  public DefaultMapReduceSpecification(String name, String description, String inputDataSet,
                                       String outputDataSet, Set<String> dataSets,
                                       Map<String, String> properties, int mapperMemoryMB,
                                       int reducerMemoryMB) {
    // Delegate with a null className; callers that have the MapReduce instance
    // use the constructor below, which derives the class name reflectively.
    this(null, name, description, inputDataSet, outputDataSet, dataSets, properties, mapperMemoryMB, reducerMemoryMB);
  }

  public DefaultMapReduceSpecification(MapReduce mapReduce) {
    MapReduceSpecification configureSpec = mapReduce.configure();

    Set<String> dataSets = Sets.newHashSet(configureSpec.getDataSets());
    Map<String, String> properties = Maps.newHashMap(configureSpec.getProperties());

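    // Scan the MapReduce instance for annotated fields; the two extractors fold
    // any declared properties and datasets into the collections gathered above.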
    Reflections.visit(mapReduce, TypeToken.of(mapReduce.getClass()),
                      new PropertyFieldExtractor(properties),
                      new DataSetFieldExtractor(dataSets));

    this.className = mapReduce.getClass().getName();
    this.name = configureSpec.getName();
    this.description = configureSpec.getDescription();
    this.inputDataSet = configureSpec.getInputDataSet();
    this.outputDataSet = configureSpec.getOutputDataSet();

    this.dataSets = ImmutableSet.copyOf(dataSets);
    this.properties = ImmutableMap.copyOf(properties);

    this.mapperMemoryMB = configureSpec.getMapperMemoryMB();
    this.reducerMemoryMB = configureSpec.getReducerMemoryMB();
  }
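The two field extractors above imply that a job class can also declare its properties and datasets through annotated fields. A minimal sketch, assuming CDAP's @Property and @UseDataSet annotations and the KeyValueTable dataset; all names are illustrative:

import co.cask.cdap.api.annotation.Property;
import co.cask.cdap.api.annotation.UseDataSet;
import co.cask.cdap.api.dataset.lib.KeyValueTable;
import co.cask.cdap.api.mapreduce.MapReduce;
import co.cask.cdap.api.mapreduce.MapReduceContext;
import co.cask.cdap.api.mapreduce.MapReduceSpecification;

public class AnnotatedWordCountJob implements MapReduce {

  // Picked up by DataSetFieldExtractor and added to the specification's datasets.
  @UseDataSet("wordCounts")
  private KeyValueTable wordCounts;

  // Picked up by PropertyFieldExtractor and added to the specification's properties.
  @Property
  private String ignoreCase;

  @Override
  public MapReduceSpecification configure() {
    return MapReduceSpecification.Builder.with()
      .setName("AnnotatedWordCountJob")
      .setDescription("Declares its dataset and property via annotations")
      .build();
  }

  @Override
  public void beforeSubmit(MapReduceContext context) throws Exception { }

  @Override
  public void onFinish(boolean succeeded, MapReduceContext context) throws Exception { }
}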

    /**
     * Adds a {@link MapReduce} job to this workflow.
     *
     * @param mapReduce The MapReduce job to add.
     * @return A {@link MapReduceSpecification} used for the given MapReduce job.
     */
    private MapReduceSpecification addWorkflowMapReduce(MapReduce mapReduce) {
      MapReduceSpecification mapReduceSpec = new DefaultMapReduceSpecification(mapReduce);

      // Rename the MapReduce job based on the step in the workflow.
      final String mapReduceName = String.format("%s_%s", name, mapReduceSpec.getName());
      mapReduceSpec = new ForwardingMapReduceSpecification(mapReduceSpec) {
        @Override
        public String getName() {
          return mapReduceName;
        }
        // All other methods are inherited from ForwardingMapReduceSpecification.
      };
      return mapReduceSpec;
    }


      @Override
      public MoreAction<T> startWith(MapReduce mapReduce) {
        Preconditions.checkArgument(mapReduce != null, "MapReduce is null.");
        MapReduceSpecification mapReduceSpec = builder.addWorkflowMapReduce(mapReduce);
        return startWith(new MapReduceWorkflowAction(mapReduce.configure().getName(), mapReduceSpec.getName()));
      }


      @Override
      public T onlyWith(MapReduce mapReduce) {
        Preconditions.checkArgument(mapReduce != null, "MapReduce is null.");
        MapReduceSpecification mapReduceSpec = builder.addWorkflowMapReduce(mapReduce);
        return onlyWith(new MapReduceWorkflowAction(mapReduce.configure().getName(), mapReduceSpec.getName()));
      }


      @Override
      public MoreAction<T> then(MapReduce mapReduce) {
        Preconditions.checkArgument(mapReduce != null, "MapReduce is null.");
        MapReduceSpecification mapReduceSpec = builder.addWorkflowMapReduce(mapReduce);
        return then(new MapReduceWorkflowAction(mapReduce.configure().getName(), mapReduceSpec.getName()));
      }
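Taken together, startWith(...), then(...) and onlyWith(...) suggest how a workflow would be assembled. A hedged sketch, assuming a WorkflowSpecification staged builder analogous to the MapReduceSpecification one; the Workflow types and job names are illustrative:

import co.cask.cdap.api.workflow.Workflow;
import co.cask.cdap.api.workflow.WorkflowSpecification;

public class AnalyticsWorkflow implements Workflow {

  @Override
  public WorkflowSpecification configure() {
    return WorkflowSpecification.Builder.with()
      .setName("AnalyticsWorkflow")
      .setDescription("Runs the word-count job as the only workflow action")
      // onlyWith(...) routes through addWorkflowMapReduce(...), which renames the
      // job to "<workflowName>_<jobName>", e.g. "AnalyticsWorkflow_WordCountJob".
      .onlyWith(new WordCountJob())
      .build();
  }
}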

    Program program;
    try {
      program = Programs.create(locationFactory.create(programLocation), classLoader);
      // Check whether it was launched from a Workflow; if so, wrap the Program.
      if (workflowBatch != null) {
        MapReduceSpecification mapReduceSpec = program.getSpecification().getMapReduce().get(workflowBatch);
        Preconditions.checkArgument(mapReduceSpec != null, "Cannot find MapReduceSpecification for %s", workflowBatch);
        program = new WorkflowMapReduceProgram(program, mapReduceSpec);
      }
    } catch (IOException e) {
      LOG.error("Could not initialize Program from location: {}", programLocation);
      throw Throwables.propagate(e);
    }

    // Initialize the dataset context and hook it up with the MapReduce job's transaction.

    DatasetFramework datasetFramework = injector.getInstance(DatasetFramework.class);
    CConfiguration configuration = injector.getInstance(CConfiguration.class);

    ApplicationSpecification programSpec = program.getSpecification();

    // if this is not for a mapper or a reducer, we don't need the metrics collection service
    MetricsCollectionService metricsCollectionService =
      (type == null) ? null : injector.getInstance(MetricsCollectionService.class);

    DiscoveryServiceClient discoveryServiceClient = injector.getInstance(DiscoveryServiceClient.class);

    // Create the MapReduce job context.
    MapReduceSpecification spec = program.getSpecification().getMapReduce().get(program.getName());
    BasicMapReduceContext context =
      new BasicMapReduceContext(program, type, RunIds.fromString(runId),
                                runtimeArguments, programSpec.getDatasets().keySet(), spec, logicalStartTime,
                                workflowBatch, discoveryServiceClient, metricsCollectionService,
                                datasetFramework, configuration);

    ProgramType processorType = program.getType();
    Preconditions.checkNotNull(processorType, "Missing processor type.");
    Preconditions.checkArgument(processorType == ProgramType.MAPREDUCE, "Only MAPREDUCE program type is supported.");

    MapReduceSpecification spec = appSpec.getMapReduce().get(program.getName());
    Preconditions.checkNotNull(spec, "Missing MapReduceSpecification for %s", program.getName());

    LOG.info("Launching MapReduce program: {}:{}", program.getName(), spec.getName());
    TwillController controller = launcher.launch(new MapReduceTwillApplication(program, spec,
                                                                               hConfFile, cConfFile, eventHandler));

    return new MapReduceTwillProgramController(program.getName(), controller).startListen();
  }

    ProgramType processorType = program.getType();
    Preconditions.checkNotNull(processorType, "Missing processor type.");
    Preconditions.checkArgument(processorType == ProgramType.MAPREDUCE, "Only MAPREDUCE program type is supported.");

    MapReduceSpecification spec = appSpec.getMapReduce().get(program.getName());
    Preconditions.checkNotNull(spec, "Missing MapReduceSpecification for %s", program.getName());

    // Optionally get the runId. If the MapReduce was started by another program (e.g. a Workflow), it inherits that program's runId.
    Arguments arguments = options.getArguments();
    RunId runId = arguments.hasOption(ProgramOptionConstants.RUN_ID)
                    ? RunIds.fromString(arguments.getOption(ProgramOptionConstants.RUN_ID))
                    : RunIds.generate();

    long logicalStartTime = arguments.hasOption(ProgramOptionConstants.LOGICAL_START_TIME)
                                ? Long.parseLong(arguments.getOption(ProgramOptionConstants.LOGICAL_START_TIME))
                                : System.currentTimeMillis();

    String workflowBatch = arguments.getOption(ProgramOptionConstants.WORKFLOW_BATCH);
    MapReduce mapReduce;
    try {
      mapReduce = new InstantiatorFactory(false).get(TypeToken.of(program.<MapReduce>getMainClass())).create();
    } catch (Exception e) {
      LOG.error("Failed to instantiate MapReduce class for {}", spec.getClassName(), e);
      throw Throwables.propagate(e);
    }

    final BasicMapReduceContext context =
      new BasicMapReduceContext(program, null, runId, options.getUserArguments(),

  }

  @Override
  public Callable<MapReduceContext> create(String name) {

    final MapReduceSpecification mapReduceSpec = workflowSpec.getMapReduce().get(name);
    Preconditions.checkArgument(mapReduceSpec != null,
                                "No MapReduce with name %s found in Workflow %s", name, workflowSpec.getName());

    final Program mapReduceProgram = new WorkflowMapReduceProgram(workflowProgram, mapReduceSpec);
    final ProgramOptions options = new SimpleProgramOptions(
