Examples of AbstractJobVertex


Examples of org.apache.flink.runtime.jobgraph.AbstractJobVertex

      inputConfig.setOutputSerializer(serializer);
      inputConfig.addOutputShipStrategy(ShipStrategyType.FORWARD);
    }

    // - head ------------------------------------------------------------------------------------------------------
    AbstractJobVertex head = JobGraphUtils.createTask(IterationHeadPactTask.class, "Iteration Head", jobGraph, numSubTasks);
    TaskConfig headConfig = new TaskConfig(head.getConfiguration());
    {
      headConfig.setIterationId(ITERATION_ID);

      // input to iteration head
      headConfig.addInputToGroup(0);
      headConfig.setInputSerializer(serializer, 0);
      headConfig.setInputLocalStrategy(0, LocalStrategy.NONE);
      headConfig.setIterationHeadPartialSolutionOrWorksetInputIndex(0);

      // output into iteration
      headConfig.setOutputSerializer(serializer);
      headConfig.addOutputShipStrategy(ShipStrategyType.PARTITION_HASH);
      headConfig.setOutputComparator(comparator, 0);

      // final output
      TaskConfig headFinalOutConfig = new TaskConfig(new Configuration());
      headFinalOutConfig.setOutputSerializer(serializer);
      headFinalOutConfig.addOutputShipStrategy(ShipStrategyType.FORWARD);
      headConfig.setIterationHeadFinalOutputConfig(headFinalOutConfig);

      // the sync
      headConfig.setIterationHeadIndexOfSyncOutput(2);

      // driver
      headConfig.setDriver(CollectorMapDriver.class);
      headConfig.setDriverStrategy(DriverStrategy.COLLECTOR_MAP);
      headConfig.setStubWrapper(new UserCodeClassWrapper<DummyMapper>(DummyMapper.class));

      // back channel
      headConfig.setRelativeBackChannelMemory(1.0);
    }

    // - tail ------------------------------------------------------------------------------------------------------
    AbstractJobVertex tail = JobGraphUtils.createTask(IterationTailPactTask.class, "Chained Iteration Tail", jobGraph, numSubTasks);
    TaskConfig tailConfig = new TaskConfig(tail.getConfiguration());
    {
      tailConfig.setIterationId(ITERATION_ID);

      // inputs and driver
      tailConfig.addInputToGroup(0);
      tailConfig.setInputSerializer(serializer, 0);

      // output
      tailConfig.addOutputShipStrategy(ShipStrategyType.FORWARD);
      tailConfig.setOutputSerializer(serializer);

      // the driver
      tailConfig.setDriver(GroupReduceDriver.class);
      tailConfig.setDriverStrategy(DriverStrategy.SORTED_GROUP_REDUCE);
      tailConfig.setDriverComparator(comparator, 0);
      tailConfig.setStubWrapper(new UserCodeClassWrapper<DummyReducer>(DummyReducer.class));

      // chained mapper
      TaskConfig chainedMapperConfig = new TaskConfig(new Configuration());
      chainedMapperConfig.setDriverStrategy(DriverStrategy.COLLECTOR_MAP);
      chainedMapperConfig.setStubWrapper(new UserCodeClassWrapper<IncrementCoordinatesMapper>(
        IncrementCoordinatesMapper.class));

      chainedMapperConfig.setInputLocalStrategy(0, LocalStrategy.NONE);
      chainedMapperConfig.setInputSerializer(serializer, 0);

      chainedMapperConfig.setOutputSerializer(serializer);

      chainedMapperConfig.setIsWorksetUpdate();

      tailConfig.addChainedTask(ChainedCollectorMapDriver.class, chainedMapperConfig, "Chained ID Mapper");
    }

    // - output ----------------------------------------------------------------------------------------------------
    OutputFormatVertex output = JobGraphUtils.createFileOutput(jobGraph, "Output", numSubTasks);
    TaskConfig outputConfig = new TaskConfig(output.getConfiguration());
    {
      outputConfig.addInputToGroup(0);
      outputConfig.setInputSerializer(serializer, 0);

      outputConfig.setStubWrapper(new UserCodeClassWrapper<PointOutFormat>(PointOutFormat.class));
      outputConfig.setStubParameter(FileOutputFormat.FILE_PARAMETER_KEY, outputPath);
    }

    // - sync ------------------------------------------------------------------------------------------------------
    AbstractJobVertex sync = JobGraphUtils.createSync(jobGraph, numSubTasks);
    TaskConfig syncConfig = new TaskConfig(sync.getConfiguration());
    syncConfig.setNumberOfIterations(maxIterations);
    syncConfig.setIterationId(ITERATION_ID);

    // --------------------------------------------------------------------------------------------------------------
    // 2. EDGES
    // --------------------------------------------------------------------------------------------------------------
    JobGraphUtils.connect(input, head, ChannelType.IN_MEMORY, DistributionPattern.POINTWISE);

    JobGraphUtils.connect(head, tail, ChannelType.IN_MEMORY, DistributionPattern.BIPARTITE);
    tailConfig.setGateIterativeWithNumberOfEventsUntilInterrupt(0, numSubTasks);

    JobGraphUtils.connect(head, output, ChannelType.IN_MEMORY, DistributionPattern.POINTWISE);

    JobGraphUtils.connect(head, sync, ChannelType.NETWORK, DistributionPattern.POINTWISE);

    // --------------------------------------------------------------------------------------------------------------
    // 3. INSTANCE SHARING
    // --------------------------------------------------------------------------------------------------------------
   
    SlotSharingGroup sharingGroup = new SlotSharingGroup();
   
    input.setSlotSharingGroup(sharingGroup);
    head.setSlotSharingGroup(sharingGroup);
    tail.setSlotSharingGroup(sharingGroup);
    output.setSlotSharingGroup(sharingGroup);
    sync.setSlotSharingGroup(sharingGroup);
   
    tail.setStrictlyCoLocatedWith(head);

    return jobGraph;
  }
View Full Code Here

Examples of org.apache.flink.runtime.jobgraph.AbstractJobVertex

 
  @Test
  public void testNToN() {
    final int N = 23;
   
    AbstractJobVertex v1 = new AbstractJobVertex("vertex1");
    AbstractJobVertex v2 = new AbstractJobVertex("vertex2");
 
    v1.setParallelism(N);
    v2.setParallelism(N);
 
    v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE);
 
    List<AbstractJobVertex> ordered = new ArrayList<AbstractJobVertex>(Arrays.asList(v1, v2));

    ExecutionGraph eg = new ExecutionGraph(jobId, jobName, cfg);
    try {
      eg.attachJobGraph(ordered);
    }
    catch (JobException e) {
      e.printStackTrace();
      fail("Job failed with exception: " + e.getMessage());
    }
   
    ExecutionJobVertex target = eg.getAllVertices().get(v2.getID());
   
    for (ExecutionVertex ev : target.getTaskVertices()) {
      assertEquals(1, ev.getNumberOfInputs());
     
      ExecutionEdge[] inEdges = ev.getInputEdges(0);
View Full Code Here

Examples of org.apache.flink.runtime.jobgraph.AbstractJobVertex

 
  @Test
  public void test2NToN() {
    final int N = 17;
   
    AbstractJobVertex v1 = new AbstractJobVertex("vertex1");
    AbstractJobVertex v2 = new AbstractJobVertex("vertex2");
 
    v1.setParallelism(2 * N);
    v2.setParallelism(N);
 
    v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE);
 
    List<AbstractJobVertex> ordered = new ArrayList<AbstractJobVertex>(Arrays.asList(v1, v2));

    ExecutionGraph eg = new ExecutionGraph(jobId, jobName, cfg);
    try {
      eg.attachJobGraph(ordered);
    }
    catch (JobException e) {
      e.printStackTrace();
      fail("Job failed with exception: " + e.getMessage());
    }
   
    ExecutionJobVertex target = eg.getAllVertices().get(v2.getID());
   
    for (ExecutionVertex ev : target.getTaskVertices()) {
      assertEquals(1, ev.getNumberOfInputs());
     
      ExecutionEdge[] inEdges = ev.getInputEdges(0);
View Full Code Here

Examples of org.apache.flink.runtime.jobgraph.AbstractJobVertex

 
  @Test
  public void test3NToN() {
    final int N = 17;
   
    AbstractJobVertex v1 = new AbstractJobVertex("vertex1");
    AbstractJobVertex v2 = new AbstractJobVertex("vertex2");
 
    v1.setParallelism(3 * N);
    v2.setParallelism(N);
 
    v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE);
 
    List<AbstractJobVertex> ordered = new ArrayList<AbstractJobVertex>(Arrays.asList(v1, v2));

    ExecutionGraph eg = new ExecutionGraph(jobId, jobName, cfg);
    try {
      eg.attachJobGraph(ordered);
    }
    catch (JobException e) {
      e.printStackTrace();
      fail("Job failed with exception: " + e.getMessage());
    }
   
    ExecutionJobVertex target = eg.getAllVertices().get(v2.getID());
   
    for (ExecutionVertex ev : target.getTaskVertices()) {
      assertEquals(1, ev.getNumberOfInputs());
     
      ExecutionEdge[] inEdges = ev.getInputEdges(0);
View Full Code Here

Examples of org.apache.flink.runtime.jobgraph.AbstractJobVertex

 
  @Test
  public void testNTo2N() {
    final int N = 41;
   
    AbstractJobVertex v1 = new AbstractJobVertex("vertex1");
    AbstractJobVertex v2 = new AbstractJobVertex("vertex2");
 
    v1.setParallelism(N);
    v2.setParallelism(2 * N);
 
    v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE);
 
    List<AbstractJobVertex> ordered = new ArrayList<AbstractJobVertex>(Arrays.asList(v1, v2));

    ExecutionGraph eg = new ExecutionGraph(jobId, jobName, cfg);
    try {
      eg.attachJobGraph(ordered);
    }
    catch (JobException e) {
      e.printStackTrace();
      fail("Job failed with exception: " + e.getMessage());
    }
   
    ExecutionJobVertex target = eg.getAllVertices().get(v2.getID());
   
    for (ExecutionVertex ev : target.getTaskVertices()) {
      assertEquals(1, ev.getNumberOfInputs());
     
      ExecutionEdge[] inEdges = ev.getInputEdges(0);
View Full Code Here

Examples of org.apache.flink.runtime.jobgraph.AbstractJobVertex

 
  @Test
  public void testNTo7N() {
    final int N = 11;
   
    AbstractJobVertex v1 = new AbstractJobVertex("vertex1");
    AbstractJobVertex v2 = new AbstractJobVertex("vertex2");
 
    v1.setParallelism(N);
    v2.setParallelism(7 * N);
 
    v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE);
 
    List<AbstractJobVertex> ordered = new ArrayList<AbstractJobVertex>(Arrays.asList(v1, v2));

    ExecutionGraph eg = new ExecutionGraph(jobId, jobName, cfg);
    try {
      eg.attachJobGraph(ordered);
    }
    catch (JobException e) {
      e.printStackTrace();
      fail("Job failed with exception: " + e.getMessage());
    }
   
    ExecutionJobVertex target = eg.getAllVertices().get(v2.getID());
   
    for (ExecutionVertex ev : target.getTaskVertices()) {
      assertEquals(1, ev.getNumberOfInputs());
     
      ExecutionEdge[] inEdges = ev.getInputEdges(0);
View Full Code Here

Examples of org.apache.flink.runtime.jobgraph.AbstractJobVertex

    }
   
    final int factor = highDop / lowDop;
    final int delta = highDop % lowDop == 0 ? 0 : 1;
   
    AbstractJobVertex v1 = new AbstractJobVertex("vertex1");
    AbstractJobVertex v2 = new AbstractJobVertex("vertex2");
 
    v1.setParallelism(lowDop);
    v2.setParallelism(highDop);
 
    v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE);
 
    List<AbstractJobVertex> ordered = new ArrayList<AbstractJobVertex>(Arrays.asList(v1, v2));

    ExecutionGraph eg = new ExecutionGraph(jobId, jobName, cfg);
    try {
      eg.attachJobGraph(ordered);
    }
    catch (JobException e) {
      e.printStackTrace();
      fail("Job failed with exception: " + e.getMessage());
    }
   
    ExecutionJobVertex target = eg.getAllVertices().get(v2.getID());
   
    int[] timesUsed = new int[lowDop];
   
    for (ExecutionVertex ev : target.getTaskVertices()) {
      assertEquals(1, ev.getNumberOfInputs());
View Full Code Here

Examples of org.apache.flink.runtime.jobgraph.AbstractJobVertex

    }
   
    final int factor = highDop / lowDop;
    final int delta = highDop % lowDop == 0 ? 0 : 1;
   
    AbstractJobVertex v1 = new AbstractJobVertex("vertex1");
    AbstractJobVertex v2 = new AbstractJobVertex("vertex2");
 
    v1.setParallelism(highDop);
    v2.setParallelism(lowDop);
 
    v2.connectNewDataSetAsInput(v1, DistributionPattern.POINTWISE);
 
    List<AbstractJobVertex> ordered = new ArrayList<AbstractJobVertex>(Arrays.asList(v1, v2));

    ExecutionGraph eg = new ExecutionGraph(jobId, jobName, cfg);
    try {
      eg.attachJobGraph(ordered);
    }
    catch (JobException e) {
      e.printStackTrace();
      fail("Job failed with exception: " + e.getMessage());
    }
   
    ExecutionJobVertex target = eg.getAllVertices().get(v2.getID());
   
    int[] timesUsed = new int[highDop];
   
    for (ExecutionVertex ev : target.getTaskVertices()) {
      assertEquals(1, ev.getNumberOfInputs());
View Full Code Here

Examples of org.apache.flink.runtime.jobgraph.AbstractJobVertex

    int parallelism = vertexParallelism.get(vertexName);
    byte[] outputSelector = outputSelectors.get(vertexName);
    Map<String, OperatorState<?>> state = operatorStates.get(vertexName);

    // Create vertex object
    AbstractJobVertex vertex = new AbstractJobVertex(vertexName);

    this.jobGraph.addVertex(vertex);

    vertex.setInvokableClass(vertexClass);
    vertex.setParallelism(parallelism);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Parallelism set: {} for {}", parallelism, vertexName);
    }

    StreamConfig config = new StreamConfig(vertex.getConfiguration());

    config.setMutability(mutability.get(vertexName));
    config.setBufferTimeout(bufferTimeout.get(vertexName));

    config.setTypeWrapperIn1(typeWrapperIn1.get(vertexName));
View Full Code Here

Examples of org.apache.flink.runtime.jobgraph.AbstractJobVertex

   *            The partitioner
   */
  private <T> void connect(String upStreamVertexName, String downStreamVertexName,
      StreamPartitioner<T> partitionerObject) {

    AbstractJobVertex upStreamVertex = streamVertices.get(upStreamVertexName);
    AbstractJobVertex downStreamVertex = streamVertices.get(downStreamVertexName);

    StreamConfig config = new StreamConfig(upStreamVertex.getConfiguration());

    if (partitionerObject.getClass().equals(ForwardPartitioner.class)) {
      downStreamVertex
          .connectNewDataSetAsInput(upStreamVertex, DistributionPattern.POINTWISE);
    } else {
      downStreamVertex
          .connectNewDataSetAsInput(upStreamVertex, DistributionPattern.BIPARTITE);
    }

    if (LOG.isDebugEnabled()) {
      LOG.debug("CONNECTED: {} - {} -> {}", partitionerObject.getClass().getSimpleName(),
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Inc. Contact coftware#gmail.com.