Package org.apache.hadoop.io

Examples of org.apache.hadoop.io.DoubleWritable


  private static void printOutput(FileSystem fileSys, HamaConfiguration conf)
      throws IOException {
    // Read back the single (PI estimate, 0) pair written by the job.
    SequenceFile.Reader reader = new SequenceFile.Reader(fileSys, TMP_OUTPUT,
        conf);
    DoubleWritable output = new DoubleWritable();
    DoubleWritable zero = new DoubleWritable();
    reader.next(output, zero);   // fills both Writables in place
    reader.close();

    System.out.println("Estimated value of PI is " + output);
  }
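For reference, DoubleWritable wraps a single primitive double and serializes it as eight bytes through the Writable interface. A minimal, self-contained round-trip (a sketch for illustration, not taken from the project above) looks like this:

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.DoubleWritable;

public class DoubleWritableRoundTrip {
    public static void main(String[] args) throws Exception {
        DoubleWritable original = new DoubleWritable(3.14159);

        // Serialize: write() emits the 8-byte representation of the double.
        DataOutputBuffer out = new DataOutputBuffer();
        original.write(out);

        // Deserialize into a fresh instance.
        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());
        DoubleWritable copy = new DoubleWritable();
        copy.readFields(in);

        System.out.println(copy.get());                 // 3.14159
        System.out.println(original.compareTo(copy));   // 0 (WritableComparable)
    }
}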


      FileSystem fileSys = FileSystem.get(conf);

      // Persist the estimate as a single (pi, 0) pair, uncompressed.
      SequenceFile.Writer writer = SequenceFile.createWriter(fileSys, conf,
          TMP_OUTPUT, DoubleWritable.class, DoubleWritable.class,
          CompressionType.NONE);
      writer.append(new DoubleWritable(pi), new DoubleWritable(0));
      writer.close();
    }
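Putting the reader and writer fragments together, a self-contained sketch that writes one DoubleWritable pair to a local SequenceFile and reads it back could look like this (the /tmp path is made up for illustration; the FileSystem-based Reader/Writer calls are the older API these fragments use):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.SequenceFile;

public class PiSequenceFileDemo {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);
        Path path = new Path("/tmp/pi-estimate.seq");   // hypothetical output path

        // Write a single (estimate, 0) pair, mirroring the writer fragment above.
        SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, path,
            DoubleWritable.class, DoubleWritable.class);
        writer.append(new DoubleWritable(3.1415), new DoubleWritable(0d));
        writer.close();

        // Read it back; next() returns false once the file is exhausted.
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf);
        DoubleWritable key = new DoubleWritable();
        DoubleWritable value = new DoubleWritable();
        while (reader.next(key, value)) {
            System.out.println("Estimated value of PI is " + key.get());
        }
        reader.close();
    }
}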

        return new LongWritable(vertexId);
    }

    @Override
    public final DoubleWritable getVertexValue() {
        return new DoubleWritable(vertexValue);
    }
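Because DoubleWritable is mutable, a getter like the one above can also reuse a single instance instead of allocating a new object on every call; callers then have to copy the value if they hold on to it. A sketch of that variant (the vertexValue field name is borrowed from the snippet, the rest is hypothetical):

import org.apache.hadoop.io.DoubleWritable;

class ReusingVertex {
    private double vertexValue;
    private final DoubleWritable reusableValue = new DoubleWritable();

    void setVertexValue(double value) {
        this.vertexValue = value;
    }

    // set() overwrites the wrapped double in place, so no object is
    // allocated per call; callers must copy the value if they keep it.
    public final DoubleWritable getVertexValue() {
        reusableValue.set(vertexValue);
        return reusableValue;
    }
}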

            return offset < elementList.size();
        }

        @Override
        public DoubleWritable next() {
            return new DoubleWritable(elementList.get(offset++));
        }
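The two fragments above belong to an Iterator&lt;DoubleWritable&gt; over a list of doubles. A complete, standalone version of the same idea (the elementList name is taken from the snippet; the NoSuchElementException guard is an addition) might look like:

import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import org.apache.hadoop.io.DoubleWritable;

class DoubleWritableIterator implements Iterator<DoubleWritable> {
    private final List<Double> elementList;
    private int offset = 0;

    DoubleWritableIterator(List<Double> elementList) {
        this.elementList = elementList;
    }

    @Override
    public boolean hasNext() {
        return offset < elementList.size();
    }

    @Override
    public DoubleWritable next() {
        if (!hasNext()) {
            throw new NoSuchElementException();
        }
        // Box the primitive value into a fresh DoubleWritable per element.
        return new DoubleWritable(elementList.get(offset++));
    }

    @Override
    public void remove() {
        throw new UnsupportedOperationException();
    }
}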

    TaskAttemptContext tac = mock(TaskAttemptContext.class);
    when(tac.getConfiguration()).thenReturn(conf);

    BasicVertex vertex = mock(BasicVertex.class);
    when(vertex.getVertexId()).thenReturn(new Text("Four Tops"));
    when(vertex.getVertexValue()).thenReturn(new DoubleWritable(4d));

    // Create empty iterator == no edges
    when(vertex.iterator()).thenReturn(new ArrayList<Text>().iterator());

    RecordWriter<Text, Text> tw = mock(RecordWriter.class);

        if (getSuperstep() >= 1) {
            double sum = 0;
            while (msgIterator.hasNext()) {
                sum += msgIterator.next().get();
            }
            DoubleWritable vertexValue =
                new DoubleWritable((0.15f / getNumVertices()) + 0.85f * sum);
            setVertexValue(vertexValue);
            maxAggreg.aggregate(vertexValue);
            minAggreg.aggregate(vertexValue);
            sumAggreg.aggregate(1L);
            LOG.info(getVertexId() + ": PageRank=" + vertexValue +
                     " max=" + maxAggreg.getAggregatedValue() +
                     " min=" + minAggreg.getAggregatedValue());
        }

        if (getSuperstep() < MAX_SUPERSTEPS) {
            long edges = getNumOutEdges();
            sendMsgToAllEdges(
                new DoubleWritable(getVertexValue().get() / edges));
        } else {
            voteToHalt();
        }
    }
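The compute() method above implements the standard PageRank update with a damping factor of 0.85: each vertex sets its value to 0.15/N plus 0.85 times the sum of incoming messages, then sends value/outDegree along every out-edge. The arithmetic in isolation (hypothetical helpers for illustration, not part of the Giraph API):

final class PageRankMath {
    // newRank = 0.15 / numVertices + 0.85 * (sum of incoming contributions)
    static double update(double sumOfMessages, long numVertices) {
        return (0.15 / numVertices) + 0.85 * sumOfMessages;
    }

    // Each vertex spreads its current rank evenly over its out-edges.
    static double outgoingContribution(double rank, long numOutEdges) {
        return rank / numOutEdges;
    }
}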

              if (sumAggreg.getAggregatedValue().get() != getNumVertices()) {
                  throw new RuntimeException("wrong value of SumAggreg: " +
                          sumAggreg.getAggregatedValue() + ", should be: " +
                          getNumVertices());
              }
              DoubleWritable maxPagerank =
                      (DoubleWritable) maxAggreg.getAggregatedValue();
              LOG.info("aggregatedMaxPageRank=" + maxPagerank.get());
              DoubleWritable minPagerank =
                      (DoubleWritable) minAggreg.getAggregatedValue();
              LOG.info("aggregatedMinPageRank=" + minPagerank.get());
          }
          useAggregator("sum");
          useAggregator("min");
          useAggregator("max");
          sumAggreg.setAggregatedValue(new LongWritable(0L));
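The sum/min/max aggregators checked above are registered elsewhere in the job. As a framework-independent illustration of what the max aggregator computes over DoubleWritable values (a sketch, not the Giraph Aggregator interface):

import org.apache.hadoop.io.DoubleWritable;

final class Aggregations {
    // Fold a stream of DoubleWritable values into their maximum.
    static DoubleWritable max(Iterable<DoubleWritable> values) {
        DoubleWritable result = new DoubleWritable(Double.NEGATIVE_INFINITY);
        for (DoubleWritable v : values) {
            if (v.get() > result.get()) {
                result.set(v.get());
            }
        }
        return result;
    }
}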

    @Override
    public void decodeEdge(String s1, String s2, Edge<Text, DoubleWritable>
            textIntWritableEdge) {
      textIntWritableEdge.setDestVertexId(new Text(s1));
      textIntWritableEdge.setEdgeValue(new DoubleWritable(Double.valueOf(s2)));
    }
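decodeEdge() above relies on Double.valueOf, which throws NumberFormatException on malformed input. When reading untrusted edge files, a guarded variant (a hypothetical helper, not part of the original class) could default the edge weight instead:

import org.apache.hadoop.io.DoubleWritable;

final class EdgeValues {
    // Parse an edge weight, falling back to 0.0 on malformed input.
    static DoubleWritable parse(String s) {
        try {
            return new DoubleWritable(Double.parseDouble(s));
        } catch (NumberFormatException e) {
            return new DoubleWritable(0d);
        }
    }
}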

    assertTrue("Should have been able to read vertex", vr.nextVertex());
    BasicVertex<LongWritable, DoubleWritable, DoubleWritable, BooleanWritable>
        vertex = vr.getCurrentVertex();
    setGraphState(vertex, graphState);
    assertValidVertex(conf, graphState, vertex,
        new LongWritable(42), new DoubleWritable(0.1),
        new Edge<LongWritable, DoubleWritable>(new LongWritable(99), new DoubleWritable(0.2)),
        new Edge<LongWritable, DoubleWritable>(new LongWritable(2000), new DoubleWritable(0.3)),
        new Edge<LongWritable, DoubleWritable>(new LongWritable(4000), new DoubleWritable(0.4)));
    assertEquals(vertex.getNumOutEdges(), 3);
  }

            BasicVertex<LongWritable, DoubleWritable, FloatWritable, DoubleWritable>
                vertex = BspUtils.createVertex(configuration);

            LongWritable vertexId = new LongWritable(
                (inputSplit.getSplitIndex() * totalRecords) + recordsRead);
            DoubleWritable vertexValue = new DoubleWritable(vertexId.get() * 10d);
            long destVertexId =
                (vertexId.get() + 1) %
                (inputSplit.getNumSplits() * totalRecords);
            float edgeValue = vertexId.get() * 100f;
            Map<LongWritable, FloatWritable> edges = Maps.newHashMap();
