Package eu.stratosphere.api.java.record.operators

Examples of eu.stratosphere.api.java.record.operators.ReduceOperator


    // read the input text file line by line
    FileDataSource source = new FileDataSource(new TextInputFormat(), dataInput, "Input Lines");
    // split each line into words, emitting one record per word
    MapOperator mapper = MapOperator.builder(new TokenizeLine())
      .input(source)
      .name("Tokenize Lines")
      .build();
    // group by the word (StringValue at field 0) and aggregate the per-word counts
    ReduceOperator reducer = ReduceOperator.builder(CountWords.class, StringValue.class, 0)
      .input(mapper)
      .name("Count Words")
      .build();
   
    @SuppressWarnings("unchecked")
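
The builder's first argument is the reduce UDF (passed as a class here, as an instance in other snippets below), followed by the key type and the key field position. For orientation, here is a minimal sketch of what a CountWords reduce function could look like against the Record API's ReduceFunction contract; the field layout (word in field 0, count in field 1) is an assumption, not taken from the snippet above:

    import java.util.Iterator;

    import eu.stratosphere.api.java.record.functions.ReduceFunction;
    import eu.stratosphere.types.IntValue;
    import eu.stratosphere.types.Record;
    import eu.stratosphere.util.Collector;

    // Hypothetical reduce UDF: sums the counts of all records that share the same word (key field 0).
    public class CountWords extends ReduceFunction {

      @Override
      public void reduce(Iterator<Record> records, Collector<Record> out) throws Exception {
        Record current = null;
        int sum = 0;
        while (records.hasNext()) {
          current = records.next();
          sum += current.getField(1, IntValue.class).getValue();
        }
        current.setField(1, new IntValue(sum));
        out.collect(current);
      }
    }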


      FileDataSource sourceNode = new FileDataSource(new TextInputFormat(), IN_FILE, "Input Lines");
      MapOperator mapNode = MapOperator.builder(new TokenizeLine())
        .input(sourceNode)
        .name("Tokenize Lines")
        .build();
      ReduceOperator reduceNode = ReduceOperator.builder(new CountWords(), StringValue.class, 0)
        .input(mapNode)
        .name("Count Words")
        .build();
      FileDataSink out = new FileDataSink(new CsvOutputFormat(), OUT_FILE, reduceNode, "Word Counts");
      CsvOutputFormat.configureRecordFormat(out)
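
The excerpt stops in the middle of the output-format configuration. In this style of job, the call chain typically continues by declaring the sink's record layout and wrapping the sink into a Plan; a sketch of how that could continue (delimiters, field order, and job name are assumptions):

      CsvOutputFormat.configureRecordFormat(out)
        .recordDelimiter('\n')
        .fieldDelimiter(' ')
        .field(StringValue.class, 0)   // the word
        .field(IntValue.class, 1);     // its count

      Plan plan = new Plan(out, "WordCount Example");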

        .input2(edges)
        .name("Join Candidate Id With Neighbor")
        .build();

    // create ReduceOperator for finding the minimum candidate component id per vertex
    ReduceOperator minCandidateId = ReduceOperator.builder(new MinimumComponentIDReduce(), LongValue.class, 0)
        .input(joinWithNeighbors)
        .name("Find Minimum Candidate Id")
        .build();
   
    // create CrossOperator for distance computation
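
For context, a UDF like the MinimumComponentIDReduce built above typically keeps, per vertex, only the smallest candidate component id of its group. A minimal sketch against the Record API (imports as in the CountWords sketch above, plus eu.stratosphere.types.LongValue; the field layout, vertex id in field 0 and candidate id in field 1, is an assumption):

    // Hypothetical min-id reducer: for each vertex (key field 0, LongValue), emit one record
    // carrying the smallest candidate component id seen in the group (assumed to be field 1).
    public class MinimumComponentIDReduce extends ReduceFunction {

      @Override
      public void reduce(Iterator<Record> records, Collector<Record> out) {
        Record first = records.next();
        long vertexId = first.getField(0, LongValue.class).getValue();
        long minId = first.getField(1, LongValue.class).getValue();
        while (records.hasNext()) {
          long candidate = records.next().getField(1, LongValue.class).getValue();
          if (candidate < minId) {
            minId = candidate;
          }
        }
        out.collect(new Record(new LongValue(vertexId), new LongValue(minId)));
      }
    }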

    // construct the plan

    FileDataSource sourceA = new FileDataSource(new DummyInputFormat(), IN_FILE);
    FileDataSource sourceB = new FileDataSource(new DummyInputFormat(), IN_FILE);
   
    ReduceOperator redA = ReduceOperator.builder(new IdentityReduce(), IntValue.class, 0)
      .input(sourceA)
      .build();
    ReduceOperator redB = ReduceOperator.builder(new IdentityReduce(), IntValue.class, 0)
      .input(sourceB)
      .build();
   
    ReduceOperator globalRed = ReduceOperator.builder(new IdentityReduce(), IntValue.class, 0).build();
    globalRed.addInput(redA);
    globalRed.addInput(redB);
   
    FileDataSink sink = new FileDataSink(new DummyOutputFormat(), OUT_FILE, globalRed);
   
    // return the plan
    Plan plan = new Plan(sink, "Union Property Propagation");
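
Several of these plan-construction snippets reuse an IdentityReduce UDF. Under the Record API this is simply a reduce function that forwards every record of a key group unchanged; a minimal sketch (written from scratch here, not copied from the test sources, with the same imports as the CountWords sketch above):

    // Hypothetical identity reducer: emits all records of a group without modification.
    public class IdentityReduce extends ReduceFunction {

      @Override
      public void reduce(Iterator<Record> records, Collector<Record> out) {
        while (records.hasNext()) {
          out.collect(records.next());
        }
      }
    }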

    MapOperator mapper = MapOperator.builder(new TokenizeLine())
        .input(source)
        .name("Tokenize Lines")
        .build();
    ReduceOperator reducer = ReduceOperator.builder(CountWords.class, StringValue.class, 0)
        .input(mapper)
        .name("Count Words")
        .build();
    HadoopDataSink<Text, IntWritable> out = new HadoopDataSink<Text, IntWritable>(
        new TextOutputFormat<Text, IntWritable>(), new JobConf(), "Hadoop TextOutputFormat",
        reducer, Text.class, IntWritable.class);
    TextOutputFormat.setOutputPath(out.getJobConf(), new Path(output));
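
After the output path is set on the JobConf, the Hadoop-compatibility sink is used like any other data sink when assembling the plan; a sketch, assuming the sink can be handed to the Plan constructor the same way as the file sinks above (the job name is made up):

    Plan plan = new Plan(out, "WordCount with Hadoop TextOutputFormat");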

   
    MapOperator mapper = MapOperator.builder(new TokenizeLine())
      .input(source)
      .name("Tokenize Lines")
      .build();
    ReduceOperator reducer = ReduceOperator.builder(CountWords.class, StringValue.class, 0)
      .input(mapper)
      .name("Count Words")
      .build();
    FileDataSink out = new FileDataSink(new CsvOutputFormat(), output, reducer, "Word Counts");
    CsvOutputFormat.configureRecordFormat(out)

        .build();
      JoinOperator mat2 = JoinOperator.builder(new DummyMatchStub(), IntValue.class, 0, 0)
        .input1(ma)
        .input2(mat1)
        .build();
      ReduceOperator r = ReduceOperator.builder(new IdentityReduce(), IntValue.class, 0)
        .input(ma)
        .build();
      CrossOperator c = CrossOperator.builder(new DummyCrossStub())
        .input1(r)
        .input2(mat2)

      FileDataSource sourceB = new FileDataSource(new DummyInputFormat(), "file:///test/file2", "Source B");
      FileDataSource sourceC = new FileDataSource(new DummyInputFormat(), "file:///test/file3", "Source C");
     
      MapOperator map1 = MapOperator.builder(new IdentityMap()).input(sourceA).name("Map 1").build();
     
      ReduceOperator reduce1 = ReduceOperator.builder(new IdentityReduce(), IntValue.class, 0)
        .input(map1)
        .name("Reduce 1")
        .build();
     
      @SuppressWarnings("unchecked")

        .name("Match 1")
        .build();
     
      MapOperator ma1 = MapOperator.builder(new IdentityMap()).input(mat1).name("Map1").build();
     
      ReduceOperator r1 = ReduceOperator.builder(new IdentityReduce(), IntValue.class, 0)
        .input(ma1)
        .name("Reduce 1")
        .build();
     
      ReduceOperator r2 = ReduceOperator.builder(new IdentityReduce(), IntValue.class, 0)
        .input(mat1)
        .name("Reduce 2")
        .build();
     
      MapOperator ma2 = MapOperator.builder(new IdentityMap()).input(mat1).name("Map 2").build();

    MapOperator mappedSource = MapOperator.builder(IdentityMap.class)
        .input(source)
        .name("Identity mapped source")
        .build();

    ReduceOperator reducedSource = ReduceOperator.builder(IdentityReduce.class)
        .input(source)
        .name("Identity reduce source")
        .build();

    DeltaIteration iteration = new DeltaIteration(0, "Loop");
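
The snippet ends right after the DeltaIteration is constructed (keyed on field 0). To become part of a runnable plan, the iteration still needs its initial solution set, initial workset, and per-superstep results wired in; a sketch of that wiring, with the operator roles chosen for illustration rather than taken from the original test:

    // Hypothetical wiring: use the reduced source as the initial solution set
    // and the mapped source as the initial workset.
    iteration.setInitialSolutionSet(reducedSource);
    iteration.setInitialWorkset(mappedSource);

    // ... build the step function from iteration.getSolutionSet() and iteration.getWorkset() ...

    // feed the step function's results back into the iteration
    iteration.setSolutionSetDelta(solutionSetDelta);   // hypothetical operator
    iteration.setNextWorkset(nextWorkset);             // hypothetical operator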
