Examples of DeepTree


Examples of edu.stanford.nlp.trees.DeepTree

      // For each tree, move in the direction of the gold tree, and
      // move away from the direction of the best scoring hypothesis

      IdentityHashMap<Tree, SimpleMatrix> goldVectors = new IdentityHashMap<Tree, SimpleMatrix>();
      double scoreGold = score(tree, goldVectors);
      DeepTree bestTree = getHighestScoringTree(tree, TRAIN_LAMBDA);
      DeepTree goldTree = new DeepTree(tree, goldVectors, scoreGold);
      return Pair.makePair(goldTree, bestTree);
    }
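
These fragments all treat DeepTree as a simple container pairing a parse tree with its per-node vectors and a model score. A minimal sketch of that pattern, using only the constructor and getters that appear on this page (the class name and the sources of the arguments are placeholders, not part of the library):

    import java.util.IdentityHashMap;

    import org.ejml.simple.SimpleMatrix;

    import edu.stanford.nlp.trees.DeepTree;
    import edu.stanford.nlp.trees.Tree;

    public class DeepTreeSketch {
      // Wrap a parse tree, its per-node vectors, and a score in a DeepTree,
      // then read the three pieces back out.  The tree, vectors, and score
      // are assumed to come from elsewhere, as in the snippets on this page.
      public static DeepTree wrap(Tree parse, IdentityHashMap<Tree, SimpleMatrix> nodeVectors, double score) {
        DeepTree deepTree = new DeepTree(parse, nodeVectors, score);
        System.err.println("score = " + deepTree.getScore());
        System.err.println("nodes with vectors = " + deepTree.getVectors().size());
        System.err.println("parse = " + deepTree.getTree());
        return deepTree;
      }
    }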

Examples of edu.stanford.nlp.trees.DeepTree

      if (op.trainOptions.useContextWords) {
        Trees.convertToCoreLabels(transformedTree);
        transformedTree.setSpans();
      }
      double score = scorer.score(transformedTree, nodeVectors);
      deepTrees.add(new DeepTree(tree, nodeVectors, score));
      return score;
    }

Examples of edu.stanford.nlp.trees.DeepTree

        }
        RerankerQuery reranker = rpq.rerankerQuery();
        if (!(reranker instanceof DVModelReranker.Query)) {
          throw new IllegalArgumentException("Expected a DVModelReranker");
        }
        DeepTree deepTree = ((DVModelReranker.Query) reranker).getDeepTrees().get(0);
        IdentityHashMap<Tree, SimpleMatrix> vectors = deepTree.getVectors();

        for (Map.Entry<Tree, SimpleMatrix> entry : vectors.entrySet()) {
          System.err.println(entry.getKey() + "   " + entry.getValue());
        }

        FileWriter fout = new FileWriter(outputPath + File.separator + "sentence" + count + ".txt");
        BufferedWriter bout = new BufferedWriter(fout);

        bout.write(Sentence.listToString(sentence));
        bout.newLine();
        bout.write(deepTree.getTree().toString());
        bout.newLine();

        for (HasWord word : sentence) {
          outputMatrix(bout, model.getWordVector(word.word()));
        }
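
The outputMatrix helper is not shown in this fragment. A hedged sketch of what it plausibly does, assuming it simply writes each entry of the EJML SimpleMatrix the way the root-vector loop in the final example on this page does:

        // Hypothetical sketch of the unshown outputMatrix helper: write every
        // entry of a SimpleMatrix, space-separated, followed by a newline.
        // (Assumes java.io.BufferedWriter / IOException and org.ejml.simple.SimpleMatrix.)
        private static void outputMatrix(BufferedWriter bout, SimpleMatrix matrix) throws IOException {
          for (int i = 0; i < matrix.getNumElements(); ++i) {
            bout.write("  " + matrix.get(i));
          }
          bout.newLine();
        }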

Examples of edu.stanford.nlp.trees.DeepTree

        bestScore = scoreHyp;
        bestVectors = nodeVectors;
      }
    }

    DeepTree returnTree = new DeepTree(bestTree, bestVectors, bestScore);
    return returnTree;
  }

Examples of edu.stanford.nlp.trees.DeepTree

    }
    wrapper.join();
    scoreTiming.done();
    while (wrapper.peek()) {
      Pair<DeepTree, DeepTree> result = wrapper.poll();
      DeepTree goldTree = result.first;
      DeepTree bestTree = result.second;

      StringBuilder treeDebugLine = new StringBuilder();
      Formatter formatter = new Formatter(treeDebugLine);
      boolean isDone = (Math.abs(bestTree.getScore() - goldTree.getScore()) <= 0.00001 || goldTree.getScore() > bestTree.getScore());
      String done = isDone ? "done" : "";
      formatter.format("Tree %6d Highest tree: %12.4f Correct tree: %12.4f %s", treeNum, bestTree.getScore(), goldTree.getScore(), done);
      System.err.println(treeDebugLine.toString());
      if (!isDone) {
        // The best hypothesis tree outscored the gold tree, so this tree
        // violates the margin: accumulate the score difference and
        // backprop derivatives for both trees.  (Trees where the gold
        // tree already scores at least as well are skipped via isDone above.)

        double valueDelta = bestTree.getScore() - goldTree.getScore();
        //double valueDelta = Math.max(0.0, - scoreGold + bestScore);
        localValue += valueDelta;

        // get the context words for this tree - should be the same
        // for either goldTree or bestTree
        List<String> words = getContextWords(goldTree.getTree());

        // The derivatives affected by this tree are only based on the
        // nodes present in this tree, e.g. not all matrix derivatives
        // will be affected by this tree
        backpropDerivative(goldTree.getTree(), words, goldTree.getVectors(),
                           binaryW_dfsG, unaryW_dfsG,
                           binaryScoreDerivativesG, unaryScoreDerivativesG,
                           wordVectorDerivativesG);

        backpropDerivative(bestTree.getTree(), words, bestTree.getVectors(),
                           binaryW_dfsB, unaryW_dfsB,
                           binaryScoreDerivativesB, unaryScoreDerivativesB,
                           wordVectorDerivativesB);

      }
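
The isDone test above is the margin condition of the training objective: a tree stops contributing to the gradient once the gold parse scores at least as well as the best-scoring hypothesis, up to a small tolerance. Factored out as a standalone helper (hypothetical, not part of the Stanford API), the check is just:

      // Hedged sketch of the margin test used in the loop above.  A tree is
      // "done" when the gold parse scores at least as well as the best
      // hypothesis, within a small tolerance.
      private static boolean marginSatisfied(DeepTree goldTree, DeepTree bestTree) {
        final double tolerance = 0.00001;
        return goldTree.getScore() > bestTree.getScore()
            || Math.abs(bestTree.getScore() - goldTree.getScore()) <= tolerance;
      }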

Examples of edu.stanford.nlp.trees.DeepTree

      }
      RerankingParserQuery rpq = (RerankingParserQuery) parserQuery;
      if (!(rpq.rerankerQuery() instanceof DVModelReranker.Query)) {
        throw new IllegalArgumentException("Expected a LexicalizedParser with a DVModel attached");
      }
      DeepTree tree = ((DVModelReranker.Query) rpq.rerankerQuery()).getDeepTrees().get(0);

      SimpleMatrix rootVector = null;
      for (Map.Entry<Tree, SimpleMatrix> entry : tree.getVectors().entrySet()) {
        if (entry.getKey().label().value().equals("ROOT")) {
          rootVector = entry.getValue();
          break;
        }
      }
      if (rootVector == null) {
        throw new AssertionError("Could not find root node vector");
      }
      out.write(tokens + "\n");
      out.write(tree.getTree() + "\n");
      for (int i = 0; i < rootVector.getNumElements(); ++i) {
        out.write("  " + rootVector.get(i));
      }
      out.write("\n\n\n");
      count++;
      if (count % 10 == 0) {
        System.err.print("  " + count);
      }

      records.add(new ParseRecord(tokens, goldTree, tree.getTree(), rootVector, tree.getVectors()));
    }
    System.err.println("  done parsing");

    List<Pair<Tree, SimpleMatrix>> subtrees = Generics.newArrayList();
    for (int i = 0; i < records.size(); ++i) {
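
The ROOT-vector lookup in this fragment uses only DeepTree.getVectors() and the node labels, so it reads naturally as a small helper. A hedged sketch (hypothetical method, not part of the library) built from exactly the calls used above:

      // Hypothetical helper: scan a DeepTree's node-vector map for the vector
      // attached to the ROOT node; return null if no ROOT node is found.
      private static SimpleMatrix findRootVector(DeepTree deepTree) {
        for (Map.Entry<Tree, SimpleMatrix> entry : deepTree.getVectors().entrySet()) {
          if (entry.getKey().label().value().equals("ROOT")) {
            return entry.getValue();
          }
        }
        return null;
      }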