Examples of TreeVariable


Examples of edu.gmu.seor.prognos.unbbayesplugin.jt.prs.bn.TreeVariable

          }
          // Add the calculated marginal to the initial network (pn).
          // We already know that every node here is discrete.
          clonedPN.compile();
          for (Node nodeToGetMarginal : clonedPN.getNodes()) {
            TreeVariable variableToGetMarginal = (TreeVariable)nodeToGetMarginal;
            TreeVariable variable = (TreeVariable)pn.getNode(nodeToGetMarginal.getName());
            float[] values = new float[variable.getStatesSize()];
            for (int stateIndex = 0; stateIndex < variable.getStatesSize(); stateIndex++) {
              values[stateIndex] = variableToGetMarginal.getMarginalAt(stateIndex);
            }
            variable.initMarginalList();
            variable.addLikeliHood(values);
           
            // Add its name to the list of already visited nodes.
            nodeVisitedBeforeMap.put(nodeToGetMarginal.getName(), true);
          }
          clonedPN = clonePN(this.pn);
        }
      }
     
      // Now we have the posterior of all parents of the current continuous node.
      // Calculate Weighted Gaussian Sum (from Symbolic Probabilistic Inference with both
      // Discrete and Continuous Variables, appendix C)
      // First let's calculate the mean SumOf(Prob[Parents(node)] * PartialMean), for every
      // normal distribution function possible (combination of parents' states).
      CNNormalDistribution cDistribution = ((ContinuousNode)node).getCnNormalDistribution();
      double[] partialMeanList = new double[cDistribution.functionSize()];
      double[] partialVarianceList = new double[cDistribution.functionSize()];
      double[] probabilityList = new double[cDistribution.functionSize()];
      double weightedMean = 0.0;
      for (int ndfIndex = 0; ndfIndex < cDistribution.functionSize(); ndfIndex++) {
        // Each normal distribution function has the mean SumOf(PartialMean), for every normal
        // distribution in the function (one for each continuous parent and one for the noise
        // normal distribution). As each continuous parent distribution is multiplied by a
        // constant, its PartialMean = constant * MeanWithoutConstant.
        // First we add the mean of the noise normal distribution.
        partialMeanList[ndfIndex] = cDistribution.getMean(ndfIndex);
        // Each normal distribution function has the variance SumOf(PartialVariance), for every normal
        // distribution in the function (one for each continuous parent and one for the noise
        // normal distribution). As each continuous parent distribution is multiplied by a
        // constant, its PartialVariance = constant^2 * VarianceWithoutConstant.
        // For the variance, we first add the variance of the noise normal distribution.
        partialVarianceList[ndfIndex] = cDistribution.getVariance(ndfIndex);
        // Then, for each continuous parent we add constant * MeanWithoutConstant for the PartialMean
        // and constant^2 * VarianceWithoutConstant for the PartialVariance.
        double meanWithoutConstant;
        double varianceWithoutConstant;
        for (int parentIndex = 0; parentIndex < cDistribution.getContinuousParentList().size(); parentIndex++) {
          TreeVariable variable = (TreeVariable)cDistribution.getContinuousParentList().get(parentIndex);
          // By the time we get here, the continuous parent has already calculated its mean and variance.
          meanWithoutConstant = variable.getMarginalAt(ContinuousNode.MEAN_MARGINAL_INDEX);
          varianceWithoutConstant = variable.getMarginalAt(ContinuousNode.VARIANCE_MARGINAL_INDEX);
          partialMeanList[ndfIndex] += cDistribution.getConstantAt(parentIndex, ndfIndex) * meanWithoutConstant;
          partialVarianceList[ndfIndex] += Math.pow(cDistribution.getConstantAt(parentIndex, ndfIndex), 2) * varianceWithoutConstant;
        }
       
        // Now we get the configuration of its parents' states to calculate its probability.
View Full Code Here
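
The snippet above is truncated before the per-configuration probabilities are combined into a single distribution. Below is a minimal sketch of how the weighted Gaussian sum could be completed, using the standard mixture-moment formulas; weightedMean, partialMeanList, partialVarianceList, probabilityList and cDistribution are the variables declared above, and it is assumed that probabilityList has already been filled from the discrete parents' marginals (that part of the loop is not shown here):

      // Sketch only: probabilityList[ndfIndex] is assumed to hold the probability of the
      // ndfIndex-th configuration of parent states, taken from the parents' marginals.
      for (int ndfIndex = 0; ndfIndex < cDistribution.functionSize(); ndfIndex++) {
        weightedMean += probabilityList[ndfIndex] * partialMeanList[ndfIndex];
      }
      // Mixture variance: SumOf(Prob * (PartialVariance + PartialMean^2)) - WeightedMean^2.
      double weightedVariance = 0.0;
      for (int ndfIndex = 0; ndfIndex < cDistribution.functionSize(); ndfIndex++) {
        weightedVariance += probabilityList[ndfIndex]
            * (partialVarianceList[ndfIndex] + Math.pow(partialMeanList[ndfIndex], 2));
      }
      weightedVariance -= Math.pow(weightedMean, 2);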

Examples of unbbayes.prs.bn.TreeVariable

   
    ArrayList<Node> nodes = net.getNodesCopy();
    int size = nodes.size();
    for (int i = 0; i < size; i++) {
      Node node = (Node) nodes.get(i);
      TreeVariable treeVariable = (TreeVariable) node;
      DefaultMutableTreeNode treeNode = null;
     
      //by young
      if( getTextOutputMode() == TEXTOUTPUTMODEMODE_USE_NAME )
        treeNode = findUserObject(node.getName(), root);
      else
      if( getTextOutputMode() == TEXTOUTPUTMODEMODE_USE_DESC )
        treeNode = findUserObject(node.getDescription(), root);
     
      if (treeNode == null)
      {
        if( getTextOutputMode() == TEXTOUTPUTMODEMODE_USE_NAME )
          treeNode = new DefaultMutableTreeNode(node.getName());
        else
        if( getTextOutputMode() == TEXTOUTPUTMODEMODE_USE_DESC )
          treeNode = new DefaultMutableTreeNode(node.getDescription());
       
      
        root.add(treeNode);
      }
     
      objectsMap.put(treeNode, node);
     
      int statesSize = node.getStatesSize();
     
      Debug.println("new tree node "+ treeNode.toString() );
     
      //by Young 2011
      String strEngine = EDB.This().get("ROOT.NET_INFO.CURRENT_ENGINE").getData();
      EDBUnit BEL = EDB.This().get("ROOT.ENGINES." + strEngine + ".NODES." + node.getName() +".BEL");
      EDBUnit type = EDB.This().get("ROOT.ENGINES." + strEngine + ".NODES." + node.getName() +".INFO.TYPE");
      EDBUnit EVIDENCE = EDB.This().get("ROOT.ENGINES." + strEngine + ".NODES." + node.getName() +".EVIDENCE");
      EDBUnit EVIDENCE2 = EDB.This().get("ROOT.NODES." + node.getName() +".EVIDENCE");
      String label;
    
      if( EVIDENCE2 != null && EVIDENCE2.hasChildren() ){
        label = "EVIDENCE" + ": " + EVIDENCE2.getData();
        treeNode.add(new DefaultMutableTreeNode(label));
      }else if( EVIDENCE != null && EVIDENCE.hasChildren() ){
        label = "EVIDENCE" + ": " + EVIDENCE.getData();
        treeNode.add(new DefaultMutableTreeNode(label));
      }else{
        if (treeVariable.getType() == Node.PROBABILISTIC_NODE_TYPE) {
          for (int j = 0; j < statesSize; j++) {
            //BEL.print("BEL ");
            if( treeVariable.hasEvidence() == true ){
              label = node.getStateAt(j) + ": " + nf.format(treeVariable.getMarginalAt(j) * 100.0);
            }
            else{
              BEL = BEL.getNext();
              label = node.getStateAt(j) + ": " + nf.format(BEL.getDataByDouble().floatValue() * 100.0);
            }
 
View Full Code Here
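
The same marginal-reading pattern can also be exercised outside the Swing tree. Below is a minimal, self-contained sketch that prints "state: probability" for every node of an already compiled ProbabilisticNetwork; the class name is illustrative, and the import paths follow the usual UnBBayes package layout but are assumptions here:

import java.text.NumberFormat;

import unbbayes.prs.Node;
import unbbayes.prs.bn.ProbabilisticNetwork;
import unbbayes.prs.bn.TreeVariable;

public class MarginalPrinter {
  /** Prints the marginal of every state of every node, mirroring the labels built above. */
  public static void printMarginals(ProbabilisticNetwork net) {
    NumberFormat nf = NumberFormat.getInstance();
    nf.setMaximumFractionDigits(2);
    for (Node node : net.getNodesCopy()) {
      // Every node of a compiled network is a TreeVariable, as in the loop above.
      TreeVariable treeVariable = (TreeVariable) node;
      System.out.println(node.getName());
      for (int j = 0; j < node.getStatesSize(); j++) {
        System.out.println("  " + node.getStateAt(j) + ": "
            + nf.format(treeVariable.getMarginalAt(j) * 100.0));
      }
    }
  }
}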

Examples of unbbayes.prs.bn.TreeVariable

    EDBUnit SNodes = EDB.This().get("ROOT.NODES");
      
    //4. create states
    for (int i = 0; i < pn.getNodeCount(); i++) {
      Node UNode = pn.getNodeAt(i);
      TreeVariable tv = (TreeVariable)UNode;
      EDBUnit SNode = SNodes.get(UNode.getName());
     
      if(tv.hasEvidence()) {
        EDBUnit evidence = SNode.create("EVIDENCE");
        String ev = UNode.getStateAt(tv.getEvidence());
        evidence.setData(ev);
      }
    }
     
  }
View Full Code Here
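
The hasEvidence()/getEvidence() accessors used above pair with TreeVariable.addFinding(int), which appears in the last example on this page. A short sketch of the round trip, where pn is the network from the snippet above; the node name "Rain" and state index 0 are illustrative, not part of the original example:

    // Set a finding on a node and read it back the way the loop above does.
    TreeVariable tv = (TreeVariable) pn.getNode("Rain");   // illustrative node name
    tv.addFinding(0);                                      // mark state 0 as observed
    if (tv.hasEvidence()) {
      String observedState = tv.getStateAt(tv.getEvidence());
      System.out.println("EVIDENCE: " + observedState);
    }
    // Propagating the finding through the network afterwards is a separate step,
    // handled by the inference algorithm, and is not shown in the snippet above.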

Examples of unbbayes.prs.bn.TreeVariable

  private void treeDoubleClick(DefaultMutableTreeNode treeNode) {
    DefaultMutableTreeNode parent =
      (DefaultMutableTreeNode) ((treeNode).getParent());
    Object obj = objectsMap.get((DefaultMutableTreeNode) parent);
    if (obj != null) {
      TreeVariable node = (TreeVariable) obj;

      //Only propagate description nodes
      if (node.getInformationType() == Node.DESCRIPTION_TYPE) {
     
        TreeVariable treeVariable = (TreeVariable) node;
        if (node.getType() == Node.PROBABILISTIC_NODE_TYPE) {
         
          for (int i = 0; i < parent.getChildCount(); i++) {
            DefaultMutableTreeNode auxNode =
              (DefaultMutableTreeNode) parent.getChildAt(i);
View Full Code Here
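
The loop is cut off at this point. The usual continuation in this kind of handler is to match the double-clicked child against the node's states and register a finding; the sketch below illustrates that pattern and is not the original loop body:

          // Illustrative continuation: find which state child was double-clicked and
          // set it as a finding (state index assumed to match the child's position).
          for (int i = 0; i < parent.getChildCount(); i++) {
            DefaultMutableTreeNode auxNode =
              (DefaultMutableTreeNode) parent.getChildAt(i);
            if (auxNode == treeNode) {
              treeVariable.addFinding(i);
              break;
            }
          }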

Examples of unbbayes.prs.bn.TreeVariable

        ProbabilisticNetwork net = (ProbabilisticNetwork) g;
        for(SimpleSSBNNode ssbnFindingNode: ssbn.getFindingList()){
          //Not all finding nodes are present in the network.
          if(ssbnFindingNode.getProbNode()!= null){
            // extract node and finding state from the network managed by the algorithm
            TreeVariable node = (TreeVariable)net.getNode(ssbnFindingNode.getProbNode().getName());
            String stateName = ssbnFindingNode.getState().getName();
           
            // TODO avoid usage of instanceof
            // TODO stop using static instance of DMP DB
            if ((node instanceof ContinuousNode)
                || (node instanceof GmmNodePluginStub)) {
              // findings to continuous nodes should be added directly to the DMP database.
              EDBUnit edbNode = EDB.This().get("ROOT.NODES." + node.getName());
                EDBUnit Evidence = edbNode.create("EVIDENCE");
                Evidence.setData(stateName);
            } else {
              // add discrete findings directly to the network managed by the inference algorithm
              // unfortunately, the network managed by the algorithm and the one linked to the SSBN may not be the same (because algorithm may instantiate another network)
              boolean isStateInNode = false;   // indicates if node contains the specified state (true if evidence is to a valid state)
              for(int i = 0; i < node.getStatesSize(); i++){
                // check if the name of the state in the SSBN node is the same in the node in actual network managed by the algorithm
                if(node.getStateAt(i).equals(stateName)){
                  node.addFinding(i);
//                ssbnFindingNode.getProbNode().addFinding(i);  // add to original node as well, just to make sure
                  isStateInNode = true;
                  break;
                }
              }
View Full Code Here
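
The state-matching loop above is the standard way to translate a state name into a TreeVariable finding. For reference, the same pattern condensed into a small helper; this is a sketch, and the error handling the original applies when isStateInNode stays false is not reproduced here:

  /**
   * Looks up stateName among the node's states and, if found, registers it as a
   * finding. Returns false when the state is unknown, which is where the original
   * code would flag invalid evidence.
   */
  private static boolean addFindingByStateName(TreeVariable node, String stateName) {
    for (int i = 0; i < node.getStatesSize(); i++) {
      if (node.getStateAt(i).equals(stateName)) {
        node.addFinding(i);
        return true;
      }
    }
    return false;
  }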