Package org.apache.flink.api.common.aggregators

Examples of org.apache.flink.api.common.aggregators.LongSumAggregator
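
LongSumAggregator is the simplest concrete Aggregator: during each superstep of an iteration, user functions feed it long values, the runtime sums the partial aggregates, and the sum is reset before the next superstep begins. The snippets below show it registered on bulk iterations, delta iterations, and vertex-centric iterations, both on its own and as the backing aggregator of a convergence criterion.

For orientation, here is a minimal, self-contained sketch of the basic pattern. The aggregator name, class names, and output path are invented for this example:

      import org.apache.flink.api.common.aggregators.LongSumAggregator;
      import org.apache.flink.api.common.functions.RichMapFunction;
      import org.apache.flink.api.java.DataSet;
      import org.apache.flink.api.java.ExecutionEnvironment;
      import org.apache.flink.api.java.operators.IterativeDataSet;
      import org.apache.flink.configuration.Configuration;

      public class LongSumAggregatorExample {

        // hypothetical aggregator name, used only in this sketch
        private static final String ELEMENT_COUNTER = "element.counter";

        public static void main(String[] args) throws Exception {
          ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

          IterativeDataSet<Long> iteration = env.generateSequence(1, 10).iterate(5);

          // make the aggregator visible to every function inside the iteration
          iteration.registerAggregator(ELEMENT_COUNTER, new LongSumAggregator());

          DataSet<Long> body = iteration.map(new CountingMap());

          iteration.closeWith(body).writeAsText("file:///tmp/aggregator-example"); // hypothetical path
          env.execute();
        }

        public static class CountingMap extends RichMapFunction<Long, Long> {

          private LongSumAggregator aggregator;

          @Override
          public void open(Configuration parameters) {
            // look up the aggregator registered under the same name
            aggregator = getIterationRuntimeContext().getIterationAggregator(ELEMENT_COUNTER);
          }

          @Override
          public Long map(Long value) {
            aggregator.aggregate(1L); // one count per element and superstep
            return value + 1;
          }
        }
      }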


      IterativeDataSet<Tuple2<Long, Long>> iteration =
          initialSolutionSet.iterate(MAX_ITERATIONS);

      // register the convergence criterion
      iteration.registerAggregationConvergenceCriterion(UPDATED_ELEMENTS,
          new LongSumAggregator(), new UpdatedElementsConvergenceCriterion(convergence_threshold));

      DataSet<Tuple2<Long, Long>> verticesWithNewComponents = iteration.join(edges).where(0).equalTo(0)
          .with(new NeighborWithComponentIDJoin())
          .groupBy(0).reduceGroup(new MinimumReduce());
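The criterion registered above is evaluated once per superstep against the LongValue that the LongSumAggregator produced. UpdatedElementsConvergenceCriterion itself is not part of this listing; one plausible shape, assuming it simply compares the number of updated elements against the configured threshold:

      import org.apache.flink.api.common.aggregators.ConvergenceCriterion;
      import org.apache.flink.types.LongValue;

      public class UpdatedElementsConvergenceCriterion implements ConvergenceCriterion<LongValue> {

        private final long threshold;

        public UpdatedElementsConvergenceCriterion(long threshold) {
          this.threshold = threshold;
        }

        @Override
        public boolean isConverged(int iteration, LongValue value) {
          // stop once fewer elements changed their value than the threshold allows
          return value.getValue() < this.threshold;
        }
      }

The test-case snippets that follow exercise registerAggregator and registerAggregationConvergenceCriterion on bulk and delta iterations.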


        DataSet<Integer> initialSolutionSet = CollectionDataSets.getIntegerDataSet(env);
        IterativeDataSet<Integer> iteration = initialSolutionSet.iterate(MAX_ITERATIONS);

        // register aggregator
        LongSumAggregator aggr = new LongSumAggregator();
        iteration.registerAggregator(NEGATIVE_ELEMENTS_AGGR, aggr);
       
        // register convergence criterion
        iteration.registerAggregationConvergenceCriterion(NEGATIVE_ELEMENTS_AGGR, aggr,
            new NegativeElementsConvergenceCriterion());
       
        DataSet<Integer> updatedDs = iteration.map(new SubtractOneMap());
        iteration.closeWith(updatedDs).writeAsText(resultPath);
        env.execute();

        // return expected result
        return "-3\n" + "-2\n" + "-2\n" + "-1\n" + "-1\n"
             + "-1\n" + "0\n" + "0\n" + "0\n" + "0\n"
             + "1\n" + "1\n" + "1\n" + "1\n" + "1\n";
      }
      case 2: {
        /*
         * Test aggregator with parameter for iterate
         */
       
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.setDegreeOfParallelism(DOP);

        DataSet<Integer> initialSolutionSet = CollectionDataSets.getIntegerDataSet(env);
        IterativeDataSet<Integer> iteration = initialSolutionSet.iterate(MAX_ITERATIONS);

        // register aggregator
        LongSumAggregatorWithParameter aggr = new LongSumAggregatorWithParameter(0);
        iteration.registerAggregator(NEGATIVE_ELEMENTS_AGGR, aggr);
       
        // register convergence criterion
        iteration.registerAggregationConvergenceCriterion(NEGATIVE_ELEMENTS_AGGR, aggr,
            new NegativeElementsConvergenceCriterion());
       
        DataSet<Integer> updatedDs = iteration.map(new SubtractOneMapWithParam());
        iteration.closeWith(updatedDs).writeAsText(resultPath);
        env.execute();
       
        // return expected result
        return "-3\n" + "-2\n" + "-2\n" + "-1\n" + "-1\n"
             + "-1\n" + "0\n" + "0\n" + "0\n" + "0\n"
             + "1\n" + "1\n" + "1\n" + "1\n" + "1\n";
      }
      case 3: {
        /*
         * Test convergence criterion with parameter for iterate
         */
       
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.setDegreeOfParallelism(DOP);

        DataSet<Integer> initialSolutionSet = CollectionDataSets.getIntegerDataSet(env);
        IterativeDataSet<Integer> iteration = initialSolutionSet.iterate(MAX_ITERATIONS);

        // register aggregator
        LongSumAggregator aggr = new LongSumAggregator();
        iteration.registerAggregator(NEGATIVE_ELEMENTS_AGGR, aggr);
       
        // register convergence criterion
        iteration.registerAggregationConvergenceCriterion(NEGATIVE_ELEMENTS_AGGR, aggr,
            new NegativeElementsConvergenceCriterionWithParam(3));
       
        DataSet<Integer> updatedDs = iteration.map(new SubtractOneMap());
        iteration.closeWith(updatedDs).writeAsText(resultPath);
        env.execute();
       
        // return expected result
        return "-3\n" + "-2\n" + "-2\n" + "-1\n" + "-1\n"
             + "-1\n" + "0\n" + "0\n" + "0\n" + "0\n"
             + "1\n" + "1\n" + "1\n" + "1\n" + "1\n";
      }
      case 4: {
        /*
         * Test aggregator without parameter for iterateDelta
         */
       
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.setDegreeOfParallelism(DOP);
       
        DataSet<Tuple2<Integer, Integer>> initialSolutionSet = CollectionDataSets.getIntegerDataSet(env).map(new TupleMakerMap());
           
        DeltaIteration<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> iteration = initialSolutionSet.iterateDelta(
            initialSolutionSet, MAX_ITERATIONS, 0);

        // register aggregator
        LongSumAggregator aggr = new LongSumAggregator();
        iteration.registerAggregator(NEGATIVE_ELEMENTS_AGGR, aggr);
       
        DataSet<Tuple2<Integer, Integer>> updatedDs = iteration.getWorkset().map(new AggregateMapDelta());
       
        DataSet<Tuple2<Integer, Integer>> newElements = updatedDs.join(iteration.getSolutionSet())
            .where(0).equalTo(0).flatMap(new UpdateFilter());
       
        DataSet<Tuple2<Integer, Integer>> iterationRes = iteration.closeWith(newElements, newElements);
        DataSet<Integer> result = iterationRes.map(new ProjectSecondMapper());
        result.writeAsText(resultPath);
       
        env.execute();
       
        // return expected result
        return "1\n" + "2\n" + "2\n" + "3\n" + "3\n"
             + "3\n" + "4\n" + "4\n" + "4\n" + "4\n"
             + "5\n" + "5\n" + "5\n" + "5\n" + "5\n";
       
      }
      case 5: {
        /*
         * Test aggregator with parameter for iterateDelta
         */
       
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.setDegreeOfParallelism(DOP);
       
        DataSet<Tuple2<Integer, Integer>> initialSolutionSet = CollectionDataSets.getIntegerDataSet(env).map(new TupleMakerMap());
           
        DeltaIteration<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> iteration = initialSolutionSet.iterateDelta(
            initialSolutionSet, MAX_ITERATIONS, 0);

        // register aggregator
        LongSumAggregator aggr = new LongSumAggregatorWithParameter(4);
        iteration.registerAggregator(NEGATIVE_ELEMENTS_AGGR, aggr);
       
        DataSet<Tuple2<Integer, Integer>> updatedDs = iteration.getWorkset().map(new AggregateMapDelta());
       
        DataSet<Tuple2<Integer, Integer>> newElements = updatedDs.join(iteration.getSolutionSet())
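The helper classes used by these test cases do not appear in the listing. Below are hedged reconstructions of the three that touch the aggregator directly; the concrete threshold is a guess, and only the interplay with LongSumAggregator matters here:

      import org.apache.flink.api.common.aggregators.ConvergenceCriterion;
      import org.apache.flink.api.common.aggregators.LongSumAggregator;
      import org.apache.flink.api.common.functions.RichMapFunction;
      import org.apache.flink.configuration.Configuration;
      import org.apache.flink.types.LongValue;

      // an aggregator that sums like LongSumAggregator but also carries a parameter
      public class LongSumAggregatorWithParameter extends LongSumAggregator {

        private final int value;

        public LongSumAggregatorWithParameter(int value) {
          this.value = value;
        }

        public int getValue() {
          return this.value;
        }
      }

      // decrements each element and counts how many results turned negative;
      // NEGATIVE_ELEMENTS_AGGR is the constant used by the surrounding test
      public class SubtractOneMap extends RichMapFunction<Integer, Integer> {

        private LongSumAggregator aggregator;

        @Override
        public void open(Configuration parameters) {
          aggregator = getIterationRuntimeContext().getIterationAggregator(NEGATIVE_ELEMENTS_AGGR);
        }

        @Override
        public Integer map(Integer value) {
          Integer result = value - 1;
          if (result < 0) {
            aggregator.aggregate(1L);
          }
          return result;
        }
      }

      // converged once the aggregated count of negative elements exceeds a bound
      public class NegativeElementsConvergenceCriterion implements ConvergenceCriterion<LongValue> {

        @Override
        public boolean isConverged(int iteration, LongValue value) {
          return value.getValue() > 3; // the bound here is a guess
        }
      }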

        vertexIteration.addBroadcastSetForUpdateFunction(BC_SET_UPDATES_NAME, bcUpdate);
       
        vertexIteration.setName(ITERATION_NAME);
        vertexIteration.setParallelism(ITERATION_DOP);
       
        vertexIteration.registerAggregator(AGGREGATOR_NAME, new LongSumAggregator());
       
        result = initialVertices.runOperation(vertexIteration);
      }
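
The Spargel snippet above registers the aggregator on a vertex-centric iteration but does not show the consuming side. A hedged sketch of a vertex update function that feeds it, assuming this Spargel generation exposes getIterationAggregator(String) on VertexUpdateFunction; the class below and its message type are hypothetical:

      import org.apache.flink.api.common.aggregators.LongSumAggregator;
      import org.apache.flink.spargel.java.MessageIterator;
      import org.apache.flink.spargel.java.VertexUpdateFunction;

      // hypothetical update function: counts vertices that lowered their value
      public class CountingUpdateFunction extends VertexUpdateFunction<Long, Long, Long> {

        private LongSumAggregator aggregator;

        @Override
        public void preSuperstep() {
          // assumption: the update function can look up registered aggregators by name
          aggregator = getIterationAggregator(AGGREGATOR_NAME);
        }

        @Override
        public void updateVertex(Long vertexKey, Long vertexValue, MessageIterator<Long> inMessages) {
          long min = vertexValue;
          while (inMessages.hasNext()) {
            min = Math.min(min, inMessages.next());
          }
          if (min < vertexValue) {
            aggregator.aggregate(1L); // one updated vertex this superstep
            setNewVertexValue(min);
          }
        }
      }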
     
     

      headConfig.setDriverComparator(comparator, 1);
      headConfig.setDriverPairComparator(pairComparator);
      headConfig.setRelativeMemoryDriver(MEM_FRAC_PER_CONSUMER);

      headConfig.addIterationAggregator(
        WorksetEmptyConvergenceCriterion.AGGREGATOR_NAME, new LongSumAggregator());
    }

    return head;
  }

    AbstractJobVertex sync = JobGraphUtils.createSync(jobGraph, numSubTasks);
    TaskConfig syncConfig = new TaskConfig(sync.getConfiguration());
    syncConfig.setNumberOfIterations(maxIterations);
    syncConfig.setIterationId(ITERATION_ID);
    syncConfig.addIterationAggregator(WorksetEmptyConvergenceCriterion.AGGREGATOR_NAME,
      new LongSumAggregator());
    syncConfig.setConvergenceCriterion(WorksetEmptyConvergenceCriterion.AGGREGATOR_NAME,
      new WorksetEmptyConvergenceCriterion());

    return sync;
  }

   
    if (convCriterion != null || convAggName != null) {
      throw new CompilerException("Error: Cannot use a custom convergence criterion with a workset iteration. Workset iterations have an implicit convergence criterion: they terminate once the workset is empty.");
    }
   
    headConfig.addIterationAggregator(WorksetEmptyConvergenceCriterion.AGGREGATOR_NAME, new LongSumAggregator());
    syncConfig.addIterationAggregator(WorksetEmptyConvergenceCriterion.AGGREGATOR_NAME, new LongSumAggregator());
    syncConfig.setConvergenceCriterion(WorksetEmptyConvergenceCriterion.AGGREGATOR_NAME, new WorksetEmptyConvergenceCriterion());
  }
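This compiler check documents the design: a workset iteration always registers its own LongSumAggregator, which counts the records leaving the workset, and pairs it with WorksetEmptyConvergenceCriterion. The criterion's logic is plausibly a plain zero test; a sketch of that shape, not the verbatim runtime source:

      import org.apache.flink.api.common.aggregators.ConvergenceCriterion;
      import org.apache.flink.types.LongValue;

      public class WorksetEmptyConvergenceCriterion implements ConvergenceCriterion<LongValue> {

        @Override
        public boolean isConverged(int iteration, LongValue value) {
          // converged as soon as no records crossed the workset in the last superstep
          return value.getValue() == 0;
        }
      }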

        DataSet<Tuple2<Double, String>> initialWorkSet = env.fromElements(new Tuple2<Double, String>(1.23, "abc"));
       
        DeltaIteration<Tuple3<Double, Long, String>, Tuple2<Double, String>> iteration = initialSolutionSet.iterateDelta(initialWorkSet, NUM_ITERATIONS, ITERATION_KEYS);
        iteration.name(ITERATION_NAME).parallelism(ITERATION_DOP);
       
        iteration.registerAggregator(AGGREGATOR_NAME, new LongSumAggregator());
       
        // test that multiple workset consumers are supported
        DataSet<Tuple2<Double, String>> worksetSelfJoin =
          iteration.getWorkset()
            .map(new IdentityMapper<Tuple2<Double,String>>())
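The snippet above is truncated in the original listing before the iteration is closed. For completeness, a minimal, self-contained delta-iteration sketch with a registered LongSumAggregator; the aggregator name, data, and output path are invented:

      import org.apache.flink.api.common.aggregators.LongSumAggregator;
      import org.apache.flink.api.common.functions.FilterFunction;
      import org.apache.flink.api.java.DataSet;
      import org.apache.flink.api.java.ExecutionEnvironment;
      import org.apache.flink.api.java.operators.DeltaIteration;
      import org.apache.flink.api.java.tuple.Tuple2;

      public class DeltaIterationAggregatorSketch {

        public static void main(String[] args) throws Exception {
          ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

          DataSet<Tuple2<Long, Long>> initial = env.fromElements(
              new Tuple2<Long, Long>(1L, 10L),
              new Tuple2<Long, Long>(2L, 20L));

          // key the solution set on field 0; the initial workset equals the solution set
          DeltaIteration<Tuple2<Long, Long>, Tuple2<Long, Long>> iteration =
              initial.iterateDelta(initial, 10, 0);

          iteration.registerAggregator("updates", new LongSumAggregator()); // hypothetical name

          // a deliberately trivial step function; real programs join against the solution set
          DataSet<Tuple2<Long, Long>> delta = iteration.getWorkset()
              .filter(new FilterFunction<Tuple2<Long, Long>>() {
                @Override
                public boolean filter(Tuple2<Long, Long> value) {
                  return value.f1 > 0;
                }
              });

          // close with the solution-set delta and the next workset
          iteration.closeWith(delta, delta)
              .writeAsText("file:///tmp/delta-aggregator-sketch"); // hypothetical path
          env.execute();
        }
      }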
