Package org.data2semantics.proppred.learners

Examples of org.data2semantics.proppred.learners.SparseVector
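The examples on this page touch only a small part of the SparseVector API: the no-argument and copy constructors, setValue/getValue, addVector, sumVector, multiplyScalar, getIndices and getLastIndex. The sketch below is a minimal, hypothetical usage example assembled from those calls alone; it assumes (as the snippets themselves appear to) that multiplyScalar scales the vector in place and that getValue returns 0.0 for an index that was never set. The class name and the values are illustrative.

  import org.data2semantics.proppred.learners.SparseVector;

  public class SparseVectorSketch {
    public static void main(String[] args) {
      // build a sparse feature vector index by index
      SparseVector a = new SparseVector();
      a.setValue(1, 2.0);
      a.setValue(7, 1.0);

      // copy-construct a second vector and scale it (assumed to be in place)
      SparseVector b = new SparseVector(a);
      b.multiplyScalar(0.5);

      // accumulate one vector into another, as the kernel code below does
      a.addVector(b);

      // iterate over the indices that carry a value
      for (int key : a.getIndices()) {
        System.out.println(key + " -> " + a.getValue(key));
      }
    }
  }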


  public SparseVector[] computeFeatureVectors(RDFDataSet dataset, List<Resource> instances, List<Statement> blackList) {
    SparseVector[] featureVectors = new SparseVector[instances.size()];
    SparseVector[] textFeatureVectors = new SparseVector[instances.size()];
    for (int i = 0; i < featureVectors.length; i++) {
      featureVectors[i] = new SparseVector();
      textFeatureVectors[i] = new SparseVector();
    }

    DirectedGraph<Vertex<Map<Integer,StringBuilder>>,Edge<Map<Integer,StringBuilder>>> graph = createGraphFromRDF(dataset, instances, blackList);
    createInstanceIndexMaps(graph, instances);


            indexFVMap.put(key, new HashMap<Integer, SparseVector>());         
          }
          if (indexFVMap.get(key).containsKey(i)) {
            indexFVMap.get(key).get(i).sumVector(vertexLiteralMap.get(vertex).get(vertexIndexMap.get(vertex)));
          } else {
            indexFVMap.get(key).put(i, new SparseVector(vertexLiteralMap.get(vertex).get(vertexIndexMap.get(vertex))));
          }   
        }
      }
      edgeIndexMap = instanceEdgeIndexMap.get(instances.get(i).toString());
      for (Edge<Map<Integer,StringBuilder>> edge : edgeIndexMap.keySet()) {

    // Compute total kernel
    ret = new SparseVector[instances.size()];

    for (int i = 0; i < ret.length; i++) {
      ret[i] = new SparseVector();
      inst1.get(instances.get(i).getFirst()).multiplyScalar(w1);
      ret[i].addVector(inst1.get(instances.get(i).getFirst()));
      inst2.get(instances.get(i).getSecond()).multiplyScalar(w2);
      ret[i].addVector(inst2.get(instances.get(i).getSecond()));
    }     
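
The loop above builds one combined vector per instance by scaling two per-instance feature vectors with the weights w1 and w2 and accumulating both into a fresh SparseVector. Below is the same pattern in isolation as a hedged sketch: the vectors, weights and class name are made up, and whether addVector sums overlapping indices or appends the second vector behind the first is not visible in these fragments.

  import org.data2semantics.proppred.learners.SparseVector;

  public class WeightedCombinationSketch {
    public static void main(String[] args) {
      SparseVector structureFV = new SparseVector(); // hypothetical first feature vector
      structureFV.setValue(1, 1.0);
      SparseVector textFV = new SparseVector();      // hypothetical second feature vector
      textFV.setValue(2, 1.0);

      double w1 = 0.7, w2 = 0.3;                     // placeholder weights

      SparseVector combined = new SparseVector();
      structureFV.multiplyScalar(w1); // scale in place, as the loop above relies on
      combined.addVector(structureFV);
      textFV.multiplyScalar(w2);
      combined.addVector(textFV);
      // combined now carries both contributions, each scaled by its weight
    }
  }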

    List<SparseVector> res = new ArrayList<SparseVector>();
   
    BreakIterator wordIt = BreakIterator.getWordInstance();
   
    for (String text : texts) {
      SparseVector fv = new SparseVector();
      res.add(fv);
     
      wordIt.setText(text);
      int start = wordIt.first();
      int end = wordIt.next();

      while (end != BreakIterator.DONE) {
        String word = text.substring(start, end);
        if (Character.isLetterOrDigit(word.charAt(0))) {
          // Get the int key for the word, assigning the next free index to a new word
          Integer key = word2index.get(word);
          if (key == null) {
            key = Integer.valueOf(word2index.size() + 1);
            word2index.put(word, key);
          }

          fv.setValue(key, fv.getValue(key) + 1); // increase count; relies on getValue returning 0.0 for an unseen key
        }
        start = end;
        end = wordIt.next();
      }
    }   

    }
    return ret;
  }
 
  private SparseVector processVertex(Resource root) {
    SparseVector features = new SparseVector();
    processVertexRec(root, new ArrayList<Integer>(), features, depth, root);
    if (probabilities) {
      features = normalizeFeatures(features);
    }
    return features;

    }
    return features;
  }
 
  private SparseVector normalizeFeatures(SparseVector features) {
    SparseVector res = new SparseVector();
    for (int key : features.getIndices()) {
      List<Integer> path = index2path.get(key);
      if (path.size()==0) {
        res.setValue(key, 1.0);
      } else {
        List<Integer> parent = path.subList(0, path.size()-pathLen);
        int parentKey = path2index.get(parent);
        res.setValue(key, features.getValue(key)/features.getValue(parentKey));
      }
    }
    return res;
  }
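
normalizeFeatures turns raw path counts into relative frequencies: every feature is divided by the count of its parent path (the same path with the last pathLen indices removed), and the empty root path is fixed at 1.0. The following self-contained sketch replays that loop on hypothetical data; index2path, path2index, the counts and pathLen = 1 are all made up for illustration.

  import java.util.*;
  import org.data2semantics.proppred.learners.SparseVector;

  public class NormalizeFeaturesSketch {
    public static void main(String[] args) {
      int pathLen = 1; // assume one index per path step

      Map<Integer, List<Integer>> index2path = new HashMap<Integer, List<Integer>>();
      index2path.put(1, Arrays.<Integer>asList());  // root path, counted 6 times below
      index2path.put(2, Arrays.asList(5));          // path [5], counted 3 times
      index2path.put(3, Arrays.asList(5, 8));       // path [5, 8], counted once

      Map<List<Integer>, Integer> path2index = new HashMap<List<Integer>, Integer>();
      for (Map.Entry<Integer, List<Integer>> e : index2path.entrySet()) {
        path2index.put(e.getValue(), e.getKey());
      }

      SparseVector features = new SparseVector();
      features.setValue(1, 6.0);
      features.setValue(2, 3.0);
      features.setValue(3, 1.0);

      // same normalization as above: each count becomes a fraction of its parent's count
      SparseVector res = new SparseVector();
      for (int key : features.getIndices()) {
        List<Integer> path = index2path.get(key);
        if (path.size() == 0) {
          res.setValue(key, 1.0);                   // root stays at 1.0
        } else {
          List<Integer> parent = path.subList(0, path.size() - pathLen);
          int parentKey = path2index.get(parent);
          // key 2 becomes 3.0 / 6.0, key 3 becomes 1.0 / 3.0
          res.setValue(key, features.getValue(key) / features.getValue(parentKey));
        }
      }
    }
  }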

    }
    return ret;
  }
 
  private SparseVector normalizeFeatures(SparseVector features) {
    SparseVector res = new SparseVector();
    for (int key : features.getIndices()) {
      List<Integer> path = index2path.get(key);
      if (path.size()==0) {
        res.setValue(key, 1.0);
      } else {
        List<Integer> parent = path.subList(0, path.size()-pathLen);
        int parentKey = path2index.get(parent);
        res.setValue(key, features.getValue(key)/features.getValue(parentKey));
      }
    }
    return res;
  }

    return res;
  }
 
 
  private SparseVector processVertex(Resource root, int fvIndex) {
    SparseVector features = new SparseVector();
    processVertexRec(root, new ArrayList<Integer>(), features, depth, root, fvIndex);
    if (probabilities) {
      features = normalizeFeatures(features);
    }
    return features;

  }
 
  private SparseVector[] processTextVertices(List<Resource> instances) {
    SparseVector[] ret = new SparseVector[instances.size()];
    for (int i = 0 ; i < ret.length; i++) {
      ret[i] = new SparseVector();
    }
   
    for (int key : textIndex2index2text.keySet()) {
      int lastIdx = ret[0].getLastIndex();
     

    return ret;
  }


  private SparseVector processVertex(Resource root) {
    SparseVector features = new SparseVector();
    processVertexRec(root, new ArrayList<Integer>(), features, depth);
    if (probabilities) {
      features = normalizeFeatures(features);
    }
    return features;


