Examples of ExecutionEnvironment
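
ExecutionEnvironment is the entry point of a Flink Java DataSet program: it creates DataSets from collections, files, or input formats, and runs the assembled plan when execute() is called. As a minimal sketch of that lifecycle (not taken from the examples below; the output path is hypothetical):

    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Integer> numbers = env.fromElements(1, 2, 3, 4);
    numbers.writeAsText("/tmp/numbers");  // hypothetical output path
    env.execute("minimal job");           // builds and runs the plan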


Examples of org.apache.flink.api.java.ExecutionEnvironment

public class BroadcastVarInitializationITCase extends JavaProgramTestBase {
 
  @Override
  protected void testProgram() throws Exception {
   
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setDegreeOfParallelism(4);
   
    DataSet<Integer> data = env.fromElements(1, 2, 3, 4, 5, 6, 7, 8);
   
    IterativeDataSet<Integer> iteration = data.iterate(10);
   
    DataSet<Integer> result = data.reduceGroup(new PickOneAllReduce()).withBroadcastSet(iteration, "bc");
   
    final List<Integer> resultList = new ArrayList<Integer>();
    iteration.closeWith(result).output(new LocalCollectionOutputFormat<Integer>(resultList));
   
    env.execute();
   
    Assert.assertEquals(8, resultList.get(0).intValue());
  }
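
The broadcast set registered with withBroadcastSet(iteration, "bc") is read inside the user function through the runtime context. PickOneAllReduce is defined elsewhere in the test; a hedged sketch of its likely shape, assuming a rich function that emits the maximum of its inputs and the broadcast value:

    // imports assumed: org.apache.flink.api.common.functions.RichGroupReduceFunction,
    // org.apache.flink.configuration.Configuration, org.apache.flink.util.Collector
    public static class PickOneAllReduce extends RichGroupReduceFunction<Integer, Integer> {
      private Integer bcValue;

      @Override
      public void open(Configuration parameters) {
        // the broadcast DataSet is materialized as a List, fresh in each superstep
        List<Integer> bc = getRuntimeContext().getBroadcastVariable("bc");
        this.bcValue = bc.isEmpty() ? null : bc.get(0);
      }

      @Override
      public void reduce(Iterable<Integer> values, Collector<Integer> out) {
        int max = (bcValue == null) ? Integer.MIN_VALUE : bcValue;
        for (int v : values) {
          max = Math.max(max, v);
        }
        out.collect(max);
      }
    }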

Examples of org.apache.flink.api.java.ExecutionEnvironment

       
        /*
         * CoGroup on tuples with key field selector
         */
       
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
       
        DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds = CollectionDataSets.get5TupleDataSet(env);
        DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds2 = CollectionDataSets.get5TupleDataSet(env);
        DataSet<Tuple2<Integer, Integer>> coGroupDs = ds.coGroup(ds2).where(0).equalTo(0).with(new Tuple5CoGroup());
       
        coGroupDs.writeAsCsv(resultPath);
        env.execute();
       
        // return expected result
        return "1,0\n" +
            "2,6\n" +
            "3,24\n" +
            "4,60\n" +
            "5,120\n";
      }
      case 2: {
       
        /*
         * CoGroup on two custom type inputs with key extractors
         */
       
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
       
        DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
        DataSet<CustomType> ds2 = CollectionDataSets.getCustomTypeDataSet(env);
        DataSet<CustomType> coGroupDs = ds.coGroup(ds2).where(new KeySelector<CustomType, Integer>() {
                  private static final long serialVersionUID = 1L;
                  @Override
                  public Integer getKey(CustomType in) {
                    return in.myInt;
                  }
                }).equalTo(new KeySelector<CustomType, Integer>() {
                  private static final long serialVersionUID = 1L;
                  @Override
                  public Integer getKey(CustomType in) {
                    return in.myInt;
                  }
                }).with(new CustomTypeCoGroup());
       
        coGroupDs.writeAsText(resultPath);
        env.execute();
       
        // return expected result
        return "1,0,test\n" +
            "2,6,test\n" +
            "3,24,test\n" +
            "4,60,test\n" +
            "5,120,test\n" +
            "6,210,test\n";
      }
      case 3: {
       
        /*
         * check correctness of cogroup if UDF returns left input objects
         */
       
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
       
        DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
        DataSet<Tuple3<Integer, Long, String>> ds2 = CollectionDataSets.get3TupleDataSet(env);
        DataSet<Tuple3<Integer, Long, String>> coGroupDs = ds.coGroup(ds2).where(0).equalTo(0).with(new Tuple3ReturnLeft());
       
        coGroupDs.writeAsCsv(resultPath);
        env.execute();
       
        // return expected result
        return "1,1,Hi\n" +
            "2,2,Hello\n" +
            "3,2,Hello world\n" +
            "4,3,Hello world, how are you?\n" +
            "5,3,I am fine.\n";
       
      }
      case 4: {
       
        /*
         * check correctness of cogroup if UDF returns right input objects
         */
       
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
       
        DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds = CollectionDataSets.get5TupleDataSet(env);
        DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds2 = CollectionDataSets.get5TupleDataSet(env);
        DataSet<Tuple5<Integer, Long, Integer, String, Long>> coGroupDs = ds.coGroup(ds2).where(0).equalTo(0).with(new Tuple5ReturnRight());
       
        coGroupDs.writeAsCsv(resultPath);
        env.execute();
       
        // return expected result
        return "1,1,0,Hallo,1\n" +
            "2,2,1,Hallo Welt,2\n" +
            "2,3,2,Hallo Welt wie,1\n" +
            "3,4,3,Hallo Welt wie gehts?,2\n" +
            "3,5,4,ABC,2\n" +
            "3,6,5,BCD,3\n";
       
      }
      case 5: {
       
        /*
         * Reduce with broadcast set
         */
       
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
       
        DataSet<Integer> intDs = CollectionDataSets.getIntegerDataSet(env);
       
        DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds = CollectionDataSets.get5TupleDataSet(env);
        DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds2 = CollectionDataSets.get5TupleDataSet(env);
        DataSet<Tuple3<Integer, Integer, Integer>> coGroupDs = ds.coGroup(ds2).where(0).equalTo(0).with(new Tuple5CoGroupBC()).withBroadcastSet(intDs, "ints");
       
        coGroupDs.writeAsCsv(resultPath);
        env.execute();
       
        // return expected result
        return "1,0,55\n" +
            "2,6,55\n" +
            "3,24,55\n" +
            "4,60,55\n" +
            "5,120,55\n";
      }
      case 6: {
       
        /*
         * CoGroup on a tuple input with key field selector and a custom type input with key extractor
         */
       
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
       
        DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds = CollectionDataSets.get5TupleDataSet(env);
        DataSet<CustomType> ds2 = CollectionDataSets.getCustomTypeDataSet(env);
        DataSet<Tuple3<Integer, Long, String>> coGroupDs = ds.coGroup(ds2).where(2).equalTo(new KeySelector<CustomType, Integer>() {
                  private static final long serialVersionUID = 1L;
                  @Override
                  public Integer getKey(CustomType in) {
                    return in.myInt;
                  }
                }).with(new MixedCoGroup());
       
        coGroupDs.writeAsCsv(resultPath);
        env.execute();
       
        // return expected result
        return "0,1,test\n" +
            "1,2,test\n" +
            "2,5,test\n" +
            "3,15,test\n" +
            "4,33,test\n" +
            "5,63,test\n" +
            "6,109,test\n" +
            "7,4,test\n" +
            "8,4,test\n" +
            "9,4,test\n" +
            "10,5,test\n" +
            "11,5,test\n" +
            "12,5,test\n" +
            "13,5,test\n" +
            "14,5,test\n";
           
      }
      case 7: {
       
        /*
         * CoGroup on a tuple input with key field selector and a custom type input with key extractor
         */
       
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
       
        DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds = CollectionDataSets.get5TupleDataSet(env);
        DataSet<CustomType> ds2 = CollectionDataSets.getCustomTypeDataSet(env);
        DataSet<CustomType> coGroupDs = ds2.coGroup(ds).where(new KeySelector<CustomType, Integer>() {
                  private static final long serialVersionUID = 1L;
                  @Override
                  public Integer getKey(CustomType in) {
                    return in.myInt;
                  }
                }).equalTo(2).with(new MixedCoGroup2());
       
        coGroupDs.writeAsText(resultPath);
        env.execute();
       
        // return expected result
        return "0,1,test\n" +
            "1,2,test\n" +
            "2,5,test\n" +
            "3,15,test\n" +
            "4,33,test\n" +
            "5,63,test\n" +
            "6,109,test\n" +
            "7,4,test\n" +
            "8,4,test\n" +
            "9,4,test\n" +
            "10,5,test\n" +
            "11,5,test\n" +
            "12,5,test\n" +
            "13,5,test\n" +
            "14,5,test\n";
       
      }
      case 8: {
        /*
         * CoGroup with multiple key fields
         */
       
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
       
        DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds1 = CollectionDataSets.get5TupleDataSet(env);
        DataSet<Tuple3<Integer, Long, String>> ds2 = CollectionDataSets.get3TupleDataSet(env);
       
        DataSet<Tuple3<Integer, Long, String>> coGrouped = ds1.coGroup(ds2)
            .where(0, 4).equalTo(0, 1).with(new Tuple5Tuple3CoGroup());
       
        coGrouped.writeAsCsv(resultPath);
        env.execute();
       
        return "1,1,Hallo\n" +
            "2,2,Hallo Welt\n" +
            "3,2,Hallo Welt wie gehts?\n" +
            "3,2,ABC\n" +
            "5,3,HIJ\n" +
            "5,3,IJK\n";
      }
      case 9: {
        /*
         * CoGroup with multiple key fields
         */
       
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
       
        DataSet<Tuple5<Integer, Long, Integer, String, Long>> ds1 = CollectionDataSets.get5TupleDataSet(env);
        DataSet<Tuple3<Integer, Long, String>> ds2 = CollectionDataSets.get3TupleDataSet(env);
       
        DataSet<Tuple3<Integer, Long, String>> coGrouped = ds1.coGroup(ds2).
            where(new KeySelector<Tuple5<Integer,Long,Integer,String,Long>, Tuple2<Integer, Long>>() {
              private static final long serialVersionUID = 1L;
             
              @Override
              public Tuple2<Integer, Long> getKey(Tuple5<Integer,Long,Integer,String,Long> t) {
                return new Tuple2<Integer, Long>(t.f0, t.f4);
              }
            }).
            equalTo(new KeySelector<Tuple3<Integer,Long,String>, Tuple2<Integer, Long>>() {
              private static final long serialVersionUID = 1L;
             
              @Override
              public Tuple2<Integer, Long> getKey(Tuple3<Integer,Long,String> t) {
                return new Tuple2<Integer, Long>(t.f0, t.f1);
              }
            }).with(new Tuple5Tuple3CoGroup());
       
        coGrouped.writeAsCsv(resultPath);
        env.execute();
       
        return "1,1,Hallo\n" +
            "2,2,Hallo Welt\n" +
            "3,2,Hallo Welt wie gehts?\n" +
            "3,2,ABC\n" +
            "5,3,HIJ\n" +
            "5,3,IJK\n";
      }
      case 10: {
        /*
         * CoGroup on two custom type inputs using expression keys
         */
       
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
       
        DataSet<CustomType> ds = CollectionDataSets.getCustomTypeDataSet(env);
        DataSet<CustomType> ds2 = CollectionDataSets.getCustomTypeDataSet(env);
        DataSet<CustomType> coGroupDs = ds.coGroup(ds2).where("myInt").equalTo("myInt").with(new CustomTypeCoGroup());
       
        coGroupDs.writeAsText(resultPath);
        env.execute();
       
        // return expected result
        return "1,0,test\n" +
            "2,6,test\n" +
            "3,24,test\n" +
            "4,60,test\n" +
            "5,120,test\n" +
            "6,210,test\n";
      }
      case 11: {
        /*
         * CoGroup on two custom type inputs using expression keys
         */
       
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
       
        DataSet<POJO> ds = CollectionDataSets.getSmallPojoDataSet(env);
        DataSet<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> ds2 = CollectionDataSets.getSmallTuplebasedDataSet(env);
        DataSet<CustomType> coGroupDs = ds.coGroup(ds2)
            .where("nestedPojo.longNumber").equalTo(6).with(new CoGroupFunction<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>, CustomType>() {
            private static final long serialVersionUID = 1L;

            @Override
            public void coGroup(
                Iterable<POJO> first,
                Iterable<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> second,
                Collector<CustomType> out) throws Exception {
              for(POJO p : first) {
                for(Tuple7<Integer, String, Integer, Integer, Long, String, Long> t: second) {
                  Assert.assertTrue(p.nestedPojo.longNumber == t.f6);
                  out.collect(new CustomType(-1, p.nestedPojo.longNumber, "Flink"));
                }
              }
            }
        });
        coGroupDs.writeAsText(resultPath);
        env.execute();
       
        // return expected result
        return   "-1,20000,Flink\n" +
            "-1,10000,Flink\n" +
            "-1,30000,Flink\n";
      }
      case 12: {
        /*
         * CoGroup field-selector (expression keys) + key selector function
         * The key selector is unnecessarily complicated (it wraps the key in a Tuple1) ;)
         */
       
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
       
        DataSet<POJO> ds = CollectionDataSets.getSmallPojoDataSet(env);
        DataSet<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> ds2 = CollectionDataSets.getSmallTuplebasedDataSet(env);
        DataSet<CustomType> coGroupDs = ds.coGroup(ds2)
            .where(new KeySelector<POJO, Tuple1<Long>>() {
              private static final long serialVersionUID = 1L;

              @Override
              public Tuple1<Long> getKey(POJO value)
                  throws Exception {
                return new Tuple1<Long>(value.nestedPojo.longNumber);
              }
            }).equalTo(6).with(new CoGroupFunction<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>, CustomType>() {
              private static final long serialVersionUID = 1L;

            @Override
            public void coGroup(
                Iterable<POJO> first,
                Iterable<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> second,
                Collector<CustomType> out) throws Exception {
              for(POJO p : first) {
                for(Tuple7<Integer, String, Integer, Integer, Long, String, Long> t: second) {
                  Assert.assertTrue(p.nestedPojo.longNumber == t.f6);
                  out.collect(new CustomType(-1, p.nestedPojo.longNumber, "Flink"));
                }
              }
            }
        });
        coGroupDs.writeAsText(resultPath);
        env.execute();
       
        // return expected result
        return   "-1,20000,Flink\n" +
            "-1,10000,Flink\n" +
            "-1,30000,Flink\n";
      }
      case 13: {
        /*
         * CoGroup field-selector (expression keys) + key selector function
         * The key selector is simple here
         */
       
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
       
        DataSet<POJO> ds = CollectionDataSets.getSmallPojoDataSet(env);
        DataSet<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> ds2 = CollectionDataSets.getSmallTuplebasedDataSet(env);
        DataSet<CustomType> coGroupDs = ds.coGroup(ds2)
            .where(new KeySelector<POJO, Long>() {
              private static final long serialVersionUID = 1L;

              @Override
              public Long getKey(POJO value)
                  throws Exception {
                return value.nestedPojo.longNumber;
              }
            }).equalTo(6).with(new CoGroupFunction<POJO, Tuple7<Integer, String, Integer, Integer, Long, String, Long>, CustomType>() {
              private static final long serialVersionUID = 1L;

            @Override
            public void coGroup(
                Iterable<POJO> first,
                Iterable<Tuple7<Integer, String, Integer, Integer, Long, String, Long>> second,
                Collector<CustomType> out) throws Exception {
              for(POJO p : first) {
                for(Tuple7<Integer, String, Integer, Integer, Long, String, Long> t: second) {
                  Assert.assertTrue(p.nestedPojo.longNumber == t.f6);
                  out.collect(new CustomType(-1, p.nestedPojo.longNumber, "Flink"));
                }
              }
            }
        });
        coGroupDs.writeAsText(resultPath);
        env.execute();
       
        // return expected result
        return   "-1,20000,Flink\n" +
            "-1,10000,Flink\n" +
            "-1,30000,Flink\n";
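
The co-group UDFs referenced above (Tuple5CoGroup, CustomTypeCoGroup, MixedCoGroup, Tuple5Tuple3CoGroup, and so on) are defined elsewhere in CoGroupITCase. A hedged sketch of the general shape, modeled on the first case's expected output (per key, field f2 is summed over both inputs); the real Tuple5CoGroup may differ in detail:

    public static class Tuple5CoGroup implements CoGroupFunction<
        Tuple5<Integer, Long, Integer, String, Long>,
        Tuple5<Integer, Long, Integer, String, Long>,
        Tuple2<Integer, Integer>> {

      @Override
      public void coGroup(Iterable<Tuple5<Integer, Long, Integer, String, Long>> first,
                          Iterable<Tuple5<Integer, Long, Integer, String, Long>> second,
                          Collector<Tuple2<Integer, Integer>> out) {
        int sum = 0;
        int key = 0;
        for (Tuple5<Integer, Long, Integer, String, Long> t : first) {
          sum += t.f2;
          key = t.f0;
        }
        for (Tuple5<Integer, Long, Integer, String, Long> t : second) {
          sum += t.f2;
          key = t.f0;
        }
        out.collect(new Tuple2<Integer, Integer>(key, sum));
      }
    }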

Examples of org.apache.flink.api.java.ExecutionEnvironment



  @Override
  protected void testProgram() throws Exception {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<Tuple3<String, Integer, String>> input = env.readCsvFile(inputPath)
      .fieldDelimiter('|')
      .types(String.class, Integer.class, String.class);

    //output the data with AvroOutputFormat for specific user type
    DataSet<User> specificUser = input.map(new ConvertToUser());
    specificUser.write(new AvroOutputFormat<User>(User.class), outputPath1);

    //output the data with AvroOutputFormat for reflect user type
    DataSet<ReflectiveUser> reflectiveUser = specificUser.map(new ConvertToReflective());
    reflectiveUser.write(new AvroOutputFormat<ReflectiveUser>(ReflectiveUser.class), outputPath2);

    env.execute();
  }
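
ConvertToUser and ConvertToReflective are plain MapFunctions defined elsewhere in the test. A hedged sketch of ConvertToUser, assuming the Avro-generated User class exposes setters for the three columns (the setter names are assumptions):

    public static class ConvertToUser implements MapFunction<Tuple3<String, Integer, String>, User> {
      @Override
      public User map(Tuple3<String, Integer, String> t) {
        User user = new User();
        user.setName(t.f0);             // assumed Avro-generated setters
        user.setFavoriteNumber(t.f1);
        user.setFavoriteColor(t.f2);
        return user;
      }
    }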

Examples of org.apache.flink.api.java.ExecutionEnvironment

    try {
      InputFormat<MyAvroType, ?> format = new AvroInputFormat<MyAvroType>(new Path("file:///ignore/this/file"), MyAvroType.class);

      TypeInformation<?> typeInfoDirect = TypeExtractor.getInputFormatTypes(format);

      ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
      DataSet<MyAvroType> input = env.createInput(format);
      TypeInformation<?> typeInfoDataSet = input.getType();


      Assert.assertTrue(typeInfoDirect instanceof PojoTypeInfo);
      Assert.assertTrue(typeInfoDataSet instanceof PojoTypeInfo);
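
For both assertions to hold, MyAvroType must satisfy Flink's POJO rules: a public class with a public no-argument constructor whose fields are public or reachable through getters and setters. A hedged sketch of such a type (the field names are assumptions):

    public static class MyAvroType {
      public String name;
      public int favoriteNumber;

      public MyAvroType() {}  // no-arg constructor required by the POJO rules
    }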

Examples of org.apache.flink.api.java.ExecutionEnvironment

public class JDBCExample {

  public static void main(String[] args) throws Exception {
    prepareTestDb();

    ExecutionEnvironment environment = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple5> source
        = environment.createInput(JDBCInputFormat.buildJDBCInputFormat()
            .setDrivername("org.apache.derby.jdbc.EmbeddedDriver")
            .setDBUrl("jdbc:derby:memory:ebookshop")
            .setQuery("select * from books")
            .finish(),
            new TupleTypeInfo(Tuple5.class, INT_TYPE_INFO, STRING_TYPE_INFO, STRING_TYPE_INFO, DOUBLE_TYPE_INFO, INT_TYPE_INFO)
        );

    source.output(JDBCOutputFormat.buildJDBCOutputFormat()
        .setDrivername("org.apache.derby.jdbc.EmbeddedDriver")
        .setDBUrl("jdbc:derby:memory:ebookshop")
        .setQuery("insert into newbooks (id,title,author,price,qty) values (?,?,?,?,?)")
        .finish());
    environment.execute();
  }
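
The example uses the raw Tuple5 type and relies on the TupleTypeInfo built from the five column types. A hedged sketch of the same source with the generics spelled out, which avoids the raw-type warnings:

    DataSet<Tuple5<Integer, String, String, Double, Integer>> books =
        environment.createInput(
            JDBCInputFormat.buildJDBCInputFormat()
                .setDrivername("org.apache.derby.jdbc.EmbeddedDriver")
                .setDBUrl("jdbc:derby:memory:ebookshop")
                .setQuery("select * from books")
                .finish(),
            new TupleTypeInfo<Tuple5<Integer, String, String, Double, Integer>>(
                INT_TYPE_INFO, STRING_TYPE_INFO, STRING_TYPE_INFO,
                DOUBLE_TYPE_INFO, INT_TYPE_INFO));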

Examples of org.apache.flink.api.java.ExecutionEnvironment

    private static final long componentId = 1L;
    private static long[] aggr_value = new long[MAX_ITERATIONS];

    public static String runProgram(String resultPath) throws Exception {

      final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
      env.setDegreeOfParallelism(DOP);

      DataSet<Tuple2<Long, Long>> initialSolutionSet = env.fromCollection(verticesInput);
      DataSet<Tuple2<Long, Long>> edges = env.fromCollection(edgesInput);

      IterativeDataSet<Tuple2<Long, Long>> iteration =
          initialSolutionSet.iterate(MAX_ITERATIONS);

      // register the aggregator
      iteration.registerAggregator(ELEMENTS_IN_COMPONENT, new LongSumAggregatorWithParameter(componentId));

      DataSet<Tuple2<Long, Long>> verticesWithNewComponents = iteration.join(edges).where(0).equalTo(0)
          .with(new NeighborWithComponentIDJoin())
          .groupBy(0).reduceGroup(new MinimumReduce());

      DataSet<Tuple2<Long, Long>> updatedComponentId =
          verticesWithNewComponents.join(iteration).where(0).equalTo(0)
          .flatMap(new MinimumIdFilter());

      iteration.closeWith(updatedComponentId).writeAsText(resultPath);

      env.execute();

      return resultPath;
    }
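
LongSumAggregatorWithParameter is defined elsewhere in the test. A hedged sketch of an aggregator that carries a constructor parameter, assuming it simply extends the built-in LongSumAggregator from org.apache.flink.api.common.aggregators:

    public static class LongSumAggregatorWithParameter extends LongSumAggregator {
      private final long componentId;

      public LongSumAggregatorWithParameter(long componentId) {
        this.componentId = componentId;
      }

      public long getComponentId() {
        return componentId;
      }
    }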

Examples of org.apache.flink.api.java.ExecutionEnvironment

    private static final String UPDATED_ELEMENTS = "updated.elements.aggr";
    private static final long convergence_threshold = 3; // the iteration stops if fewer than this many elements change value

    public static String runProgram(String resultPath) throws Exception {

      final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
      env.setDegreeOfParallelism(DOP);

      DataSet<Tuple2<Long, Long>> initialSolutionSet = env.fromCollection(verticesInput);
      DataSet<Tuple2<Long, Long>> edges = env.fromCollection(edgesInput);

      IterativeDataSet<Tuple2<Long, Long>> iteration =
          initialSolutionSet.iterate(MAX_ITERATIONS);

      // register the convergence criterion
      iteration.registerAggregationConvergenceCriterion(UPDATED_ELEMENTS,
          new LongSumAggregator(), new UpdatedElementsConvergenceCriterion(convergence_threshold));

      DataSet<Tuple2<Long, Long>> verticesWithNewComponents = iteration.join(edges).where(0).equalTo(0)
          .with(new NeighborWithComponentIDJoin())
          .groupBy(0).reduceGroup(new MinimumReduce());

      DataSet<Tuple2<Long, Long>> updatedComponentId =
          verticesWithNewComponents.join(iteration).where(0).equalTo(0)
          .flatMap(new MinimumIdFilter());

      iteration.closeWith(updatedComponentId).writeAsText(resultPath);

      env.execute();

      return resultPath;
    }
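
UpdatedElementsConvergenceCriterion is defined elsewhere in the test; a hedged sketch of its likely shape, assuming it compares the aggregated count of changed elements against the threshold (ConvergenceCriterion and LongValue come from org.apache.flink.api.common.aggregators and org.apache.flink.types):

    public static class UpdatedElementsConvergenceCriterion implements ConvergenceCriterion<LongValue> {
      private final long threshold;

      public UpdatedElementsConvergenceCriterion(long threshold) {
        this.threshold = threshold;
      }

      @Override
      public boolean isConverged(int iteration, LongValue value) {
        // converge once fewer elements than the threshold changed in the last superstep
        return value.getValue() < threshold;
      }
    }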

Examples of org.apache.flink.api.java.ExecutionEnvironment

      case 1: {
        /*
         * Test aggregator without parameter for iterate
         */

        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.setDegreeOfParallelism(DOP);

        DataSet<Integer> initialSolutionSet = CollectionDataSets.getIntegerDataSet(env);
        IterativeDataSet<Integer> iteration = initialSolutionSet.iterate(MAX_ITERATIONS);

        // register aggregator
        LongSumAggregator aggr = new LongSumAggregator();
        iteration.registerAggregator(NEGATIVE_ELEMENTS_AGGR, aggr);
       
        // register convergence criterion
        iteration.registerAggregationConvergenceCriterion(NEGATIVE_ELEMENTS_AGGR, aggr,
            new NegativeElementsConvergenceCriterion());
       
        DataSet<Integer> updatedDs = iteration.map(new SubtractOneMap());
        iteration.closeWith(updatedDs).writeAsText(resultPath);
        env.execute();

        // return expected result
        return "-3\n" + "-2\n" + "-2\n" + "-1\n" + "-1\n"
             + "-1\n" + "0\n" + "0\n" + "0\n" + "0\n"
             + "1\n" + "1\n" + "1\n" + "1\n" + "1\n";
      }
      case 2: {
        /*
         * Test aggregator with parameter for iterate
         */
       
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.setDegreeOfParallelism(DOP);

        DataSet<Integer> initialSolutionSet = CollectionDataSets.getIntegerDataSet(env);
        IterativeDataSet<Integer> iteration = initialSolutionSet.iterate(MAX_ITERATIONS);

        // register aggregator
        LongSumAggregatorWithParameter aggr = new LongSumAggregatorWithParameter(0);
        iteration.registerAggregator(NEGATIVE_ELEMENTS_AGGR, aggr);
       
        // register convergence criterion
        iteration.registerAggregationConvergenceCriterion(NEGATIVE_ELEMENTS_AGGR, aggr,
            new NegativeElementsConvergenceCriterion());
       
        DataSet<Integer> updatedDs = iteration.map(new SubtractOneMapWithParam());
        iteration.closeWith(updatedDs).writeAsText(resultPath);
        env.execute();
       
        // return expected result
        return "-3\n" + "-2\n" + "-2\n" + "-1\n" + "-1\n"
             + "-1\n" + "0\n" + "0\n" + "0\n" + "0\n"
             + "1\n" + "1\n" + "1\n" + "1\n" + "1\n";
      }
      case 3: {
        /*
         * Test convergence criterion with parameter for iterate
         */
       
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.setDegreeOfParallelism(DOP);

        DataSet<Integer> initialSolutionSet = CollectionDataSets.getIntegerDataSet(env);
        IterativeDataSet<Integer> iteration = initialSolutionSet.iterate(MAX_ITERATIONS);

        // register aggregator
        LongSumAggregator aggr = new LongSumAggregator();
        iteration.registerAggregator(NEGATIVE_ELEMENTS_AGGR, aggr);
       
        // register convergence criterion
        iteration.registerAggregationConvergenceCriterion(NEGATIVE_ELEMENTS_AGGR, aggr,
            new NegativeElementsConvergenceCriterionWithParam(3));
       
        DataSet<Integer> updatedDs = iteration.map(new SubtractOneMap());
        iteration.closeWith(updatedDs).writeAsText(resultPath);
        env.execute();
       
        // return expected result
        return "-3\n" + "-2\n" + "-2\n" + "-1\n" + "-1\n"
             + "-1\n" + "0\n" + "0\n" + "0\n" + "0\n"
             + "1\n" + "1\n" + "1\n" + "1\n" + "1\n";
      }
      case 4: {
        /*
         * Test aggregator without parameter for iterateDelta
         */
       
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.setDegreeOfParallelism(DOP);
       
        DataSet<Tuple2<Integer, Integer>> initialSolutionSet = CollectionDataSets.getIntegerDataSet(env).map(new TupleMakerMap());
           
        DeltaIteration<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> iteration = initialSolutionSet.iterateDelta(
            initialSolutionSet, MAX_ITERATIONS, 0);

        // register aggregator
        LongSumAggregator aggr = new LongSumAggregator();
        iteration.registerAggregator(NEGATIVE_ELEMENTS_AGGR, aggr);
       
        DataSet<Tuple2<Integer, Integer>> updatedDs = iteration.getWorkset().map(new AggregateMapDelta());
       
        DataSet<Tuple2<Integer, Integer>> newElements = updatedDs.join(iteration.getSolutionSet())
            .where(0).equalTo(0).flatMap(new UpdateFilter());
       
        DataSet<Tuple2<Integer, Integer>> iterationRes = iteration.closeWith(newElements, newElements);
        DataSet<Integer> result = iterationRes.map(new ProjectSecondMapper());
        result.writeAsText(resultPath);
       
        env.execute();
       
        // return expected result
        return "1\n" + "2\n" + "2\n" + "3\n" + "3\n"
             + "3\n" + "4\n" + "4\n" + "4\n" + "4\n"
             + "5\n" + "5\n" + "5\n" + "5\n" + "5\n";
       
      }
      case 5: {
        /*
         * Test aggregator with parameter for iterateDelta
         */
       
        final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        env.setDegreeOfParallelism(DOP);
       
        DataSet<Tuple2<Integer, Integer>> initialSolutionSet = CollectionDataSets.getIntegerDataSet(env).map(new TupleMakerMap());
           
        DeltaIteration<Tuple2<Integer, Integer>, Tuple2<Integer, Integer>> iteration = initialSolutionSet.iterateDelta(
            initialSolutionSet, MAX_ITERATIONS, 0);

        // register aggregator
        LongSumAggregator aggr = new LongSumAggregatorWithParameter(4);
        iteration.registerAggregator(NEGATIVE_ELEMENTS_AGGR, aggr);
       
        DataSet<Tuple2<Integer, Integer>> updatedDs = iteration.getWorkset().map(new AggregateMapDelta());
       
        DataSet<Tuple2<Integer, Integer>> newElements = updatedDs.join(iteration.getSolutionSet())
            .where(0).equalTo(0).flatMap(new UpdateFilter());
       
        DataSet<Tuple2<Integer, Integer>> iterationRes = iteration.closeWith(newElements, newElements);
        DataSet<Integer> result = iterationRes.map(new ProjectSecondMapper());
        result.writeAsText(resultPath);
       
        env.execute();
       
        // return expected result
        return "1\n" + "2\n" + "2\n" + "3\n" + "3\n"
             + "3\n" + "4\n" + "4\n" + "4\n" + "4\n"
             + "5\n" + "5\n" + "5\n" + "5\n" + "5\n";
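
Inside the iteration, the user functions update the registered aggregator through the iteration runtime context. SubtractOneMap is defined elsewhere in the test; a hedged sketch of its likely shape:

    public static class SubtractOneMap extends RichMapFunction<Integer, Integer> {
      private LongSumAggregator aggr;

      @Override
      public void open(Configuration parameters) {
        // look up the aggregator registered on the iteration under this name
        aggr = getIterationRuntimeContext().getIterationAggregator(NEGATIVE_ELEMENTS_AGGR);
      }

      @Override
      public Integer map(Integer value) {
        Integer newValue = value - 1;
        if (newValue < 0) {
          aggr.aggregate(1L);  // count the elements that became negative
        }
        return newValue;
      }
    }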

Examples of org.apache.flink.api.java.ExecutionEnvironment

   
  @Test
  public void testMultiSolutionSetJoinPlan() {
    try {
     
      ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
     
      @SuppressWarnings("unchecked")
      DataSet<Tuple2<Long, Double>> inputData = env.fromElements(new Tuple2<Long, Double>(1L, 1.0));
      DataSet<Tuple2<Long, Double>> result = constructPlan(inputData, 10);
     
      // add two sinks, to test the case of branching after an iteration
      result.print();
      result.print();
   
      Plan p = env.createProgramPlan();
     
      OptimizedPlan optPlan = compileNoStats(p);
     
      OptimizerPlanNodeResolver or = getOptimizerPlanNodeResolver(optPlan);
     

Examples of org.apache.flink.api.java.ExecutionEnvironment

public class PageRankCompilerTest extends CompilerTestBase{
 
  @Test
  public void testPageRank() {
    try {
      final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
     
      // get input data
      DataSet<Long> pagesInput = env.fromElements(1L);
      @SuppressWarnings("unchecked")
      DataSet<Tuple2<Long, Long>> linksInput = env.fromElements(new Tuple2<Long, Long>(1L, 2L));
     
      // assign initial rank to pages
      DataSet<Tuple2<Long, Double>> pagesWithRanks = pagesInput.
          map(new RankAssigner((1.0d / 10)));
     
      // build adjacency list from link input
      DataSet<Tuple2<Long, Long[]>> adjacencyListInput =
          linksInput.groupBy(0).reduceGroup(new BuildOutgoingEdgeList());
     
      // set iterative data set
      IterativeDataSet<Tuple2<Long, Double>> iteration = pagesWithRanks.iterate(10);
     
      Configuration cfg = new Configuration();
      cfg.setString(PactCompiler.HINT_LOCAL_STRATEGY, PactCompiler.HINT_LOCAL_STRATEGY_HASH_BUILD_SECOND);
     
      DataSet<Tuple2<Long, Double>> newRanks = iteration
          // join pages with outgoing edges and distribute rank
          .join(adjacencyListInput).where(0).equalTo(0).withParameters(cfg)
          .flatMap(new JoinVertexWithEdgesMatch())
          // collect and sum ranks
          .groupBy(0).aggregate(SUM, 1)
          // apply dampening factor
          .map(new Dampener(0.85, 10));
     
      DataSet<Tuple2<Long, Double>> finalPageRanks = iteration.closeWith(
          newRanks,
          newRanks.join(iteration).where(0).equalTo(0)
          // termination condition
          .filter(new EpsilonFilter()));
 
      finalPageRanks.print();
 
      // get the plan and compile it
      Plan p = env.createProgramPlan();
      OptimizedPlan op = compileNoStats(p);
     
      SinkPlanNode sinkPlanNode = (SinkPlanNode) op.getDataSinks().iterator().next();
      BulkIterationPlanNode iterPlanNode = (BulkIterationPlanNode) sinkPlanNode.getInput().getSource();
     
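
EpsilonFilter implements the termination condition: the iteration stops once no vertex's rank changes by more than some epsilon between supersteps. A hedged sketch modeled on the Flink PageRank example (the epsilon value is an assumption):

    public static class EpsilonFilter
        implements FilterFunction<Tuple2<Tuple2<Long, Double>, Tuple2<Long, Double>>> {

      private static final double EPSILON = 0.0001;  // assumed threshold

      @Override
      public boolean filter(Tuple2<Tuple2<Long, Double>, Tuple2<Long, Double>> value) {
        // keep the pair only while the rank still changes significantly
        return Math.abs(value.f0.f1 - value.f1.f1) > EPSILON;
      }
    }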