Package edu.gmu.seor.prognos.unbbayesplugin.jt.prs

Examples of edu.gmu.seor.prognos.unbbayesplugin.jt.prs.Node


            // Remove the entry at 'linearCoordToKill' from the potential table and stop recursing on this branch.
            dataPT.remove(linearCoordToKill);
            return;
        }

        // Recurse over the states of the variable at position 'control'; when 'control' is
        // the variable being maximized ('index'), state 0 is skipped ('fim' is the loop's lower bound).
        int fim = (index == control) ? 1 : 0;
        Node node = variableList.get(control);
        for (int i = node.getStatesSize()-1; i >= fim; i--) {
            coord[control] = i;
            argMax(control-1, index, coord);
        }
    }
View Full Code Here
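
The fragment above is the recursive step of an argMax-style walk over a potential table: one nested loop per variable, encoded as recursion on 'control'. A minimal, self-contained sketch of the same enumeration pattern, using plain arrays and hypothetical names rather than the UnBBayes types:

import java.util.Arrays;

// Sketch only: visit every state coordinate of a table whose i-th variable
// has stateCounts[i] states, recursing from the last variable down to the first.
public class CoordinateWalkSketch {
    static void walk(int control, int[] stateCounts, int[] coord) {
        if (control < 0) {
            System.out.println(Arrays.toString(coord)); // one complete coordinate reached
            return;
        }
        for (int i = stateCounts[control] - 1; i >= 0; i--) {
            coord[control] = i;
            walk(control - 1, stateCounts, coord);
        }
    }

    public static void main(String[] args) {
        int[] stateCounts = {3, 2}; // e.g. one 3-state and one 2-state variable
        walk(stateCounts.length - 1, stateCounts, new int[stateCounts.length]);
    }
}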


    double mean = 0.0;
    if (cDistribution.getDiscreteParentList().size() > 0) {
      throw new Exception("Not implemented yet!");
    }
   
    Node node;
    // Here we assume that all parents are continuous nodes.
    int ndfIndex = 0;
    mean += cDistribution.getMean(ndfIndex);
    for (int i = 0; i < cDistribution.getContinuousParentList().size(); i++) {
      node = cDistribution.getContinuousParentList().get(i);
View Full Code Here
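
The truncated loop above accumulates the conditional linear-Gaussian mean of the node: the mean of its noise term plus each continuous parent's mean scaled by that parent's coefficient (the getConstantAt value used in the run() example further down). A sketch of just that arithmetic, with plain arrays standing in for the UnBBayes types:

// Sketch only: mean = noiseMean + SumOf(constant_i * parentMean_i)
static double linearGaussianMean(double noiseMean, double[] constants, double[] parentMeans) {
    double mean = noiseMean;
    for (int i = 0; i < constants.length; i++) {
        mean += constants[i] * parentMeans[i];
    }
    return mean;
}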

    double variance = 0.0;
    if (cDistribution.getDiscreteParentList().size() > 0) {
      throw new Exception("Not implemented yet!");
    }
   
    Node node;
    // Here we assume that all parents are continuous nodes.
    int ndfIndex = 0;
    variance += cDistribution.getVariance(ndfIndex);
    for (int i = 0; i < cDistribution.getContinuousParentList().size(); i++) {
      node = cDistribution.getContinuousParentList().get(i);
View Full Code Here
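
The variance accumulation is analogous, except each parent's contribution is scaled by the square of its coefficient, since multiplying a Gaussian variable by a constant c multiplies its variance by c^2. A sketch under the same assumptions as the mean sketch above:

// Sketch only: variance = noiseVariance + SumOf(constant_i^2 * parentVariance_i)
static double linearGaussianVariance(double noiseVariance, double[] constants, double[] parentVariances) {
    double variance = noiseVariance;
    for (int i = 0; i < constants.length; i++) {
        variance += constants[i] * constants[i] * parentVariances[i];
    }
    return variance;
}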

    // Keeps track of the nodes that have already been added to the queue (nodeAddedList[nodeIndex]=true).
    boolean[] nodeAddedList = new boolean[continuousNodeList.size()];
    initOrderQueue(nodeAddedList);                     
    for(int i = 0; i < nodeOrderQueue.size(); i++){
      // All children of continuous nodes are also continuous.
      Node node = nodeOrderQueue.get(i);
      addToOrderQueue(node.getChildren(), nodeAddedList);     
    }   
  }
View Full Code Here
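
The loop above appends to nodeOrderQueue while iterating over it, so the queue grows breadth-first from the continuous nodes without continuous parents seeded by initOrderQueue (shown next), and nodeAddedList guarantees each node is enqueued at most once. The same pattern on plain adjacency lists, with hypothetical names:

import java.util.ArrayList;
import java.util.List;

// Sketch only: breadth-first processing order over a DAG given as child lists.
public class ProcessingOrderSketch {
    static List<Integer> order(List<List<Integer>> children, List<Integer> roots) {
        boolean[] added = new boolean[children.size()];
        List<Integer> queue = new ArrayList<>(roots);   // analogue of initOrderQueue
        for (int root : roots) {
            added[root] = true;
        }
        for (int i = 0; i < queue.size(); i++) {        // the queue grows while we scan it
            for (int child : children.get(queue.get(i))) {
                if (!added[child]) {                    // analogue of addToOrderQueue
                    added[child] = true;
                    queue.add(child);
                }
            }
        }
        return queue;
    }
}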

   * It will put in the queue the continuous nodes that have no continuous parents.
   * @param nodeAddedList Keeps track of the nodes that have already been added to the queue (nodeAddedList[nodeIndex]=true).
   */
  protected void initOrderQueue(boolean[] nodeAddedList){
    for (int i = 0; i < continuousNodeList.size(); i++) {
      Node node = continuousNodeList.get(i);
      boolean hasContinuousParent = false;
      for (Node parentNode : node.getParents()) {
        if (parentNode.getType() == Node.CONTINUOUS_NODE_TYPE) {
          hasContinuousParent = true;
          break;
        }
      }
View Full Code Here

   * @param children Children of a node that is already in the queue.
   * @param nodeAddedList Nodes that have already been added to the queue.
   */
  protected void addToOrderQueue(ArrayList<Node> children, boolean[] nodeAddedList){
    for(int i = 0 ; i < children.size(); i++){
      Node n1 = children.get(i);
      for(int j = 0 ; j < continuousNodeList.size(); j++){
        Node n2 = continuousNodeList.get(j);
        if(n1.getName().equals(n2.getName())){
          if(!nodeAddedList[j]){
            nodeOrderQueue.add(n1);           
            nodeAddedList[j] = true;           
            break;           
          }                   
View Full Code Here
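
addToOrderQueue resolves each child against continuousNodeList by name with a nested scan; on large networks the same membership test can go through a map keyed by node name instead. A hedged alternative sketch (not the library's API), assuming the same fields as the method above:

// Sketch only: look children up through a name -> position index instead of rescanning the list.
protected void addToOrderQueueIndexed(ArrayList<Node> children, boolean[] nodeAddedList) {
    java.util.Map<String, Integer> indexByName = new java.util.HashMap<String, Integer>();
    for (int j = 0; j < continuousNodeList.size(); j++) {
        indexByName.put(continuousNodeList.get(j).getName(), j);
    }
    for (int i = 0; i < children.size(); i++) {
        Node child = children.get(i);
        Integer j = indexByName.get(child.getName());
        if (j != null && !nodeAddedList[j]) {
            nodeOrderQueue.add(child);
            nodeAddedList[j] = true;
        }
    }
}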

 

  public void run() throws Exception {
    for(int i = 0; i < nodeOrderQueue.size(); i++) {
     
      Node node = nodeOrderQueue.get(i);
      List<Node> discreteParentList = new ArrayList<Node>();
      List<Node> continuousParentList = new ArrayList<Node>();
      for (Node parentNode : clonedPN.getNode(node.getName()).getParents()) {
        if (parentNode.getType() == Node.PROBABILISTIC_NODE_TYPE) {
          discreteParentList.add(parentNode);
        } else if (parentNode.getType() == Node.CONTINUOUS_NODE_TYPE) {
          continuousParentList.add(parentNode);
        }
      }
      SortUtil.sortNodeListByName(discreteParentList);
      SortUtil.sortNodeListByName(continuousParentList);
     
      // At most one sub-network needs to be compiled per discrete parent of this
      // continuous node; if two parents fall into the same sub-network, that
      // compiled network is reused for both.
      // Each sub-network is built from all discrete nodes connected to a parent of the
      // current continuous node, and compiling it yields that parent's posterior distribution.
      // This is a hybrid approach: Junction Tree where possible (discrete nodes) and
      // Weighted Gaussian Sum for the rest (continuous nodes).
      // Initializes all discrete nodes as not visited.
      Map<String, Boolean> nodeVisitedBeforeMap = new HashMap<String, Boolean>();
      for (Node discreteNode : pn.getNodes()) {
        if (discreteNode.getType() == Node.PROBABILISTIC_NODE_TYPE) {
          nodeVisitedBeforeMap.put(discreteNode.getName(), false);
        }
      }
      List<Node> nodeInNetworkList;
      boolean nodeVisitedBefore;
      for (int j = 0; j < discreteParentList.size(); j++) {
        nodeVisitedBefore = nodeVisitedBeforeMap.get(discreteParentList.get(j).getName());
        if (!nodeVisitedBefore) {
          nodeInNetworkList = new ArrayList<Node>();
          addAdjacentNodes(clonedPN.getNode(discreteParentList.get(j).getName()), nodeInNetworkList);
          List<Node> nodeToRemoveList = new ArrayList<Node>();
          for (Node nodeToRemove : clonedPN.getNodes()) {
            if (!nodeInNetworkList.contains(nodeToRemove)) {
              nodeToRemoveList.add(nodeToRemove);
            }
          }
          for (Node nodeToRemove : nodeToRemoveList) {
            clonedPN.removeNode(nodeToRemove);
          }
          // Compile the sub-network, then copy each node's marginal back into the
          // initial network (pn). We already know that every node here is discrete.
          clonedPN.compile();
          for (Node nodeToGetMarginal : clonedPN.getNodes()) {
            TreeVariable variableToGetMarginal = (TreeVariable)nodeToGetMarginal;
            TreeVariable variable = (TreeVariable)pn.getNode(nodeToGetMarginal.getName());
            float[] values = new float[variable.getStatesSize()];
            for (int stateIndex = 0; stateIndex < variable.getStatesSize(); stateIndex++) {
              values[stateIndex] = variableToGetMarginal.getMarginalAt(stateIndex);
            }
            variable.initMarginalList();
            variable.addLikeliHood(values);
           
            // Add its name to the list of already visited nodes.
            nodeVisitedBeforeMap.put(nodeToGetMarginal.getName(), true);
          }
          clonedPN = clonePN(this.pn);
        }
      }
     
      // Now we have the posterior of every parent of the current continuous node.
      // Calculate the Weighted Gaussian Sum (from "Symbolic Probabilistic Inference with both
      // Discrete and Continuous Variables", appendix C).
      // First, compute the weighted mean SumOf(Prob[Parents(node)] * PartialMean) over every
      // possible normal distribution function (one per combination of the discrete parents' states).
      CNNormalDistribution cDistribution = ((ContinuousNode)node).getCnNormalDistribution();
      double[] partialMeanList = new double[cDistribution.functionSize()];
      double[] partialVarianceList = new double[cDistribution.functionSize()];
      double[] probabilityList = new double[cDistribution.functionSize()];
      double weightedMean = 0.0;
      for (int ndfIndex = 0; ndfIndex < cDistribution.functionSize(); ndfIndex++) {
        // Each normal distribution function has the mean SumOf(PartialMean), for every normal
        // distribution in the function (one for each continuous parent and one for the noise
        // normal distribution). As each continuous parent distribution is multiplied by a
        // constant, its PartialMean = constant * MeanWithoutConstant.
        // First we add the mean of the noise normal distribution.
        partialMeanList[ndfIndex] = cDistribution.getMean(ndfIndex);
        // Each normal distribution function has the variance SumOf(PartialVariance), for every normal
        // distribution in the function (one for each continuous parent and one for the noise
        // normal distribution). As each continuous parent distribution is multiplied by a
        // constant, its PartialVariance = constant^2 * VarianceWithoutConstant.
        // For the variance, we first add the variance of the noise normal distribution.
        partialVarianceList[ndfIndex] = cDistribution.getVariance(ndfIndex);
        // Then, for each continuous parent we add constant * MeanWithoutConstant for the PartialMean
        // and constant^2 * VarianceWithoutConstant for the PartialVariance.
        double meanWithoutConstant;
        double varianceWithoutConstant;
        for (int parentIndex = 0; parentIndex < cDistribution.getContinuousParentList().size(); parentIndex++) {
          TreeVariable variable = (TreeVariable)cDistribution.getContinuousParentList().get(parentIndex);
          // By the time we get here, the continuous parent already calculated its mean and variance previously.
          meanWithoutConstant = variable.getMarginalAt(ContinuousNode.MEAN_MARGINAL_INDEX);
          varianceWithoutConstant = variable.getMarginalAt(ContinuousNode.VARIANCE_MARGINAL_INDEX);
          partialMeanList[ndfIndex] += cDistribution.getConstantAt(parentIndex, ndfIndex) * meanWithoutConstant;
          partialVarianceList[ndfIndex] += Math.pow(cDistribution.getConstantAt(parentIndex, ndfIndex), 2) * varianceWithoutConstant;
        }
       
        // Now we get the configuration of its parents states to calculate its probability.
        int[] parentsStatesConfiguration = cDistribution.getMultidimensionalCoord(ndfIndex);
        probabilityList[ndfIndex] = 1.0;
        for (int parentIndex = 0; parentIndex < parentsStatesConfiguration.length; parentIndex++) {
          probabilityList[ndfIndex] *= ((TreeVariable)pn.getNode(discreteParentList.get(parentIndex).getName())).getMarginalAt(parentsStatesConfiguration[parentIndex]);
        }
       
        // Accumulate the weighted Gaussian sum SumOf(Prob[Parents(node)] * PartialMean).
        weightedMean += probabilityList[ndfIndex] * partialMeanList[ndfIndex];
       
        // We can only calculate the weightedVariance after we have the final result
        // for the weightedMean.
      }
     
      // Now that we have the final weightedMean, we can calculate the weightedVariance.
      // WeightedVariance = SumOf(Prob[Parents(node)] * (PartialVariance + PartialMean^2 - WeightedMean^2))
      double weightedVariance = 0.0;
      for (int ndfIndex = 0; ndfIndex < cDistribution.functionSize(); ndfIndex++) {
        weightedVariance += probabilityList[ndfIndex] * (partialVarianceList[ndfIndex] + Math.pow(partialMeanList[ndfIndex], 2) - Math.pow(weightedMean, 2));
      }
     
      // Add the mean and variance as its marginal in the TreeVariable.
      float[] values = new float[node.getStatesSize()];
      values[ContinuousNode.MEAN_MARGINAL_INDEX] = (float)weightedMean;
      values[ContinuousNode.VARIANCE_MARGINAL_INDEX] = (float)weightedVariance;
      ((TreeVariable)node).initMarginalList();
      ((TreeVariable)node).addLikeliHood(values);
    }
View Full Code Here
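
In compact form, with P_k the joint posterior probability of the k-th configuration of the discrete parents' states, mu_k the partial mean and var_k the partial variance of the k-th normal distribution function, the loop computes WeightedMean = SumOf(P_k * mu_k) and then WeightedVariance = SumOf(P_k * (var_k + mu_k^2 - WeightedMean^2)); assuming the P_k sum to 1, this is the law of total variance for the mixture. A tiny numeric sketch of just that mixture arithmetic, with hypothetical values and plain arrays instead of CNNormalDistribution:

// Sketch only: collapse a two-component Gaussian mixture to a single mean and variance.
public class WeightedGaussianSumSketch {
    public static void main(String[] args) {
        double[] prob = {0.7, 0.3};       // P_k: probability of each parent-state configuration
        double[] mean = {1.0, 5.0};       // mu_k: partial means
        double[] variance = {2.0, 3.0};   // var_k: partial variances

        double weightedMean = 0.0;
        for (int k = 0; k < prob.length; k++) {
            weightedMean += prob[k] * mean[k];
        }
        double weightedVariance = 0.0;
        for (int k = 0; k < prob.length; k++) {
            weightedVariance += prob[k] * (variance[k] + mean[k] * mean[k] - weightedMean * weightedMean);
        }
        // weightedMean = 0.7*1 + 0.3*5 = 2.2
        // weightedVariance = (0.7*2 + 0.3*3) + (0.7*1 + 0.3*25) - 2.2^2 = 2.3 + 8.2 - 4.84 = 5.66
        System.out.printf("%.2f %.2f%n", weightedMean, weightedVariance);
    }
}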

   */
  protected List<Integer> getParentsIndexesInQueue(ProbabilisticNode node){
    List<Integer> indexes = new ArrayList<Integer>();
    ArrayList<Node> parents = node.getParents();   
    for(int i = 0 ; i < parents.size();i++){
      Node parentNode = parents.get(i);
      indexes.add(getIndexInQueue(parentNode));
    }
    return indexes;
  }
View Full Code Here

    if (size == 0) {
      factors = new int[1];
    }
 
    // factors[i] is the stride of the i-th variable in the flattened table:
    // the product of the state counts of all preceding variables.
    factors[0] = 1;
    Node node;
    for (int i = 1; i < size; i++) {
      node = discreteParentList.get(i-1);
      factors[i] = factors[i-1] * node.getStatesSize();
    }
  }
 
View Full Code Here
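
factors[i] is the stride of the i-th variable in the flattened table, so a multidimensional state coordinate maps to a single linear index as linear = SumOf(coord[i] * factors[i]); getMultidimensionalCoord in the run() example above appears to be the inverse mapping. A small sketch of both directions over plain arrays (hypothetical helper names):

// Sketch only: linear index <-> multidimensional coordinate, given per-variable strides
// where factors[0] = 1 and factors[i] = factors[i-1] * stateCounts[i-1].
static int toLinear(int[] coord, int[] factors) {
    int linear = 0;
    for (int i = 0; i < coord.length; i++) {
        linear += coord[i] * factors[i];
    }
    return linear;
}

static int[] toCoord(int linear, int[] factors, int[] stateCounts) {
    int[] coord = new int[factors.length];
    for (int i = 0; i < factors.length; i++) {
        coord[i] = (linear / factors[i]) % stateCounts[i];
    }
    return coord;
}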

        // 'fixo' marks variables whose state is held fixed: keep their coordinate and recurse.
        if (fixo[control]) {
            return sum(control+1, fixo, coord);
        }

        // Otherwise sum over every state of the variable at position 'control'.
        Node node = nos.get(control);
        float retorno = 0;
        for (int i = 0; i < node.getStatesSize(); i++) {
            coord[control] = i;
            retorno += sum(control+1, fixo, coord);
        }
        return retorno;
    }
View Full Code Here
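
This last fragment sums a potential over every variable that is not marked in 'fixo', i.e. it marginalizes the table down to the fixed variables, whose states are preset in 'coord'. A sketch of the same marginalization on a flat float array, reusing the hypothetical toLinear helper from the previous sketch:

// Sketch only: sum table[] over all variables not marked as fixed; fixed variables keep
// the state already written into coord[] by the caller.
static float sumOut(int control, boolean[] fixed, int[] coord,
                    float[] table, int[] factors, int[] stateCounts) {
    if (control == stateCounts.length) {
        return table[toLinear(coord, factors)];   // every variable assigned: read one cell
    }
    if (fixed[control]) {
        return sumOut(control + 1, fixed, coord, table, factors, stateCounts);
    }
    float total = 0;
    for (int i = 0; i < stateCounts[control]; i++) {
        coord[control] = i;
        total += sumOut(control + 1, fixed, coord, table, factors, stateCounts);
    }
    return total;
}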
