Package org.apache.hadoop.io

Examples of org.apache.hadoop.io.DoubleWritable


    vr.initialize(null, tac);
    assertTrue("Should have been able to read vertex", vr.nextVertex());
    BasicVertex<Text, DoubleWritable, DoubleWritable, BooleanWritable> vertex =
        vr.getCurrentVertex();
    setGraphState(vertex, graphState);
    assertValidVertex(conf, graphState, vertex, new Text("alpha"), new DoubleWritable(42d),
        new Edge<Text, DoubleWritable>(new Text("beta"), new DoubleWritable(99d)));
    assertEquals(vertex.getNumOutEdges(), 1);
  }
View Full Code Here


    }

    @Override
    public void compute(Iterator<DoubleWritable> msgIterator) {
        if (getSuperstep() == 0) {
            setVertexValue(new DoubleWritable(Double.MAX_VALUE));
        }
        double minDist = isSource() ? 0d : Double.MAX_VALUE;
        while (msgIterator.hasNext()) {
            minDist = Math.min(minDist, msgIterator.next().get());
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("Vertex " + getVertexId() + " got minDist = " + minDist +
                     " vertex value = " + getVertexValue());
        }
        if (minDist < getVertexValue().get()) {
            setVertexValue(new DoubleWritable(minDist));
            for (LongWritable targetVertexId : this) {
                FloatWritable edgeValue = getEdgeValue(targetVertexId);
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Vertex " + getVertexId() + " sent to " +
                              targetVertexId + " = " +
                              (minDist + edgeValue.get()));
                }
                sendMsg(targetVertexId,
                        new DoubleWritable(minDist + edgeValue.get()));
            }
        }
        voteToHalt();
    }
View Full Code Here

            Text line = getRecordReader().getCurrentValue();
            try {
                JSONArray jsonVertex = new JSONArray(line.toString());
                LongWritable vertexId = new LongWritable(jsonVertex.getLong(0));
                DoubleWritable vertexValue = new DoubleWritable(jsonVertex.getDouble(1));
                Map<LongWritable, FloatWritable> edges = Maps.newHashMap();
                JSONArray jsonEdgeArray = jsonVertex.getJSONArray(2);
                for (int i = 0; i < jsonEdgeArray.length(); ++i) {
                    JSONArray jsonEdge = jsonEdgeArray.getJSONArray(i);
                    edges.put(new LongWritable(jsonEdge.getLong(0)),
View Full Code Here

            if (getSuperstep() >= 1) {
                double sum = 0;
                while (msgIterator.hasNext()) {
                    sum += msgIterator.next().get();
                }
                DoubleWritable vertexValue =
                    new DoubleWritable((0.15f / getNumVertices()) + 0.85f *
                                       sum);
                setVertexValue(vertexValue);
            }

            if (getSuperstep() < getConf().getInt(SUPERSTEP_COUNT, -1)) {
                long edges = getNumOutEdges();
                sendMsgToAllEdges(
                    new DoubleWritable(getVertexValue().get() / edges));
            } else {
                voteToHalt();
            }
        }
View Full Code Here

            if (getSuperstep() >= 1) {
                double sum = 0;
                while (msgIterator.hasNext()) {
                    sum += msgIterator.next().get();
                }
                DoubleWritable vertexValue =
                    new DoubleWritable((0.15f / getNumVertices()) + 0.85f *
                                       sum);
                setVertexValue(vertexValue);
            }

            if (getSuperstep() < getConf().getInt(SUPERSTEP_COUNT, -1)) {
                long edges = getNumOutEdges();
                sendMsgToAllEdges(
                        new DoubleWritable(getVertexValue().get() / edges));
            } else {
                voteToHalt();
            }
        }
View Full Code Here

  /**
   * Replaces the current aggregate with the given value.
   *
   * @param value {@code DoubleWritable} whose primitive double overwrites
   *              (does not accumulate into) the internal {@code sum} field
   */
  public void setAggregatedValue(DoubleWritable value) {
      sum = value.get();
  }

  /**
   * @return a freshly allocated {@code DoubleWritable} wrapping the current
   *         value of the internal {@code sum} field
   */
  public DoubleWritable getAggregatedValue() {
      return new DoubleWritable(sum);
  }
View Full Code Here

  /**
   * @return a new {@code DoubleWritable} holding the current aggregate
   *         {@code sum}
   */
  public DoubleWritable getAggregatedValue() {
      return new DoubleWritable(sum);
  }

  /**
   * @return a default-constructed {@code DoubleWritable} (value 0.0) to serve
   *         as the initial value for a new aggregation round
   */
  public DoubleWritable createAggregatedValue() {
      return new DoubleWritable();
  }
View Full Code Here

            long vertexId = startingVertexId + verticesRead;
            // Seed on the vertex id to keep the vertex data the same when
            // on different number of workers, but other parameters are the
            // same.
            Random rand = new Random(vertexId);
            DoubleWritable vertexValue = new DoubleWritable(rand.nextDouble());
            Map<LongWritable, DoubleWritable> edges = Maps.newHashMap();
            for (long i = 0; i < edgesPerVertex; ++i) {
                LongWritable destVertexId = null;
                do {
                    destVertexId =
                        new LongWritable(Math.abs(rand.nextLong()) %
                                         aggregateVertices);
                } while (edges.containsKey(destVertexId));
                edges.put(destVertexId, new DoubleWritable(rand.nextDouble()));
            }
            vertex.initialize(
                new LongWritable(vertexId), vertexValue, edges, null);
            ++verticesRead;
            if (LOG.isDebugEnabled()) {
View Full Code Here

                fs.open(new Path(SimpleAggregatorWriter.filename));
            int i, all;
            for (i = 0; ; i++) {
                all = 0;
                try {
                    DoubleWritable max = new DoubleWritable();
                    max.readFields(input);
                    all++;
                    DoubleWritable min = new DoubleWritable();
                    min.readFields(input);
                    all++;
                    LongWritable sum = new LongWritable();
                    sum.readFields(input);
                    all++;
                    if (i > 0) {
                        assertTrue(max.get() == maxPageRank);
                        assertTrue(min.get() == minPageRank);
                        assertTrue(sum.get() == numVertices);
                    }
                } catch (IOException e) {
                    break;
                }
View Full Code Here

   
    double[] logTotals = new double[state.getNumTopics()];
    Arrays.fill(logTotals, Double.NEGATIVE_INFINITY);
   
    // Output sufficient statistics for each word. == pseudo-log counts.
    DoubleWritable v = new DoubleWritable();
    for (Iterator<Vector.Element> iter = wordCounts.iterateNonZero(); iter.hasNext();) {
      Vector.Element e = iter.next();
      int w = e.index();
     
      for (int k = 0; k < state.getNumTopics(); ++k) {
        v.set(doc.phi(k, w) + Math.log(e.get()));
       
        IntPairWritable kw = new IntPairWritable(k, w);
       
        // output (topic, word)'s log-probability contribution
        context.write(kw, v);
        logTotals[k] = LDAUtil.logSum(logTotals[k], v.get());
      }
    }
   
    // Output the totals for the statistics. This is to make
    // normalizing a lot easier.
    for (int k = 0; k < state.getNumTopics(); ++k) {
      IntPairWritable kw = new IntPairWritable(k, LDADriver.TOPIC_SUM_KEY);
      v.set(logTotals[k]);
      assert !Double.isNaN(v.get());
      context.write(kw, v);
    }
    IntPairWritable llk = new IntPairWritable(LDADriver.LOG_LIKELIHOOD_KEY, LDADriver.LOG_LIKELIHOOD_KEY);
    // Output log-likelihoods.
    v.set(doc.getLogLikelihood());
    context.write(llk, v);
  }
View Full Code Here

TOP

Related Classes of org.apache.hadoop.io.DoubleWritable

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.