Package uk.ac.cam.ch.wwmm.oscar3.recogniser.test

Examples of uk.ac.cam.ch.wwmm.oscar3.recogniser.test.MEMMRecogniserTest


  /**
   * @param args
   */
  public static void main(String[] args) throws Exception {
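    // Clusters the indexed documents by iteratively refining Lucene term queries
    // and prints the resulting clusters.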
    LuceneIndexerSearcher lis = new LuceneIndexerSearcher(false);
    IndexSearcher is = lis.getIndexSearcher();
    IndexReader ir = lis.getIndexReader();
   
    // Map each indexed document to the path of its source file.
    List<String> docFiles = new ArrayList<String>();
    for(int i=0;i<ir.numDocs();i++) {
      docFiles.add(ir.document(i).getField("filename").stringValue().replaceAll("markedup", "source"));
    }
   
    // Record the document frequency of every alphabetic term outside the
    // closed-class word set that occurs in more than one document.
    TermEnum textEnum = ir.terms();
    Map<String,Integer> docFreqs = new HashMap<String,Integer>();
    while(textEnum.next()) {
      Term t = textEnum.term();
      if("txt".equals(t.field())) {
        String text = t.text();
        if(TermSets.getClosedClass().contains(text)) continue;
        if(!text.matches(".*[A-Za-z].*")) continue;
        int docFreq = ir.docFreq(t);
        if(docFreq > 1) {
          docFreqs.put(text, docFreq);
        }
      }     
    }
    List<String> dfl = StringTools.getSortedList(docFreqs);

    List<Query> queries = new ArrayList<Query>();
    // Seed the clustering with term queries for the 50 most document-frequent of these terms.
    for(int i=0;i<50;i++) {
      queries.add(new TermQuery(new Term("txt", dfl.get(i))));
      System.out.println(dfl.get(i));
    }
   
    // Ten rounds of reclustering: assign each document to the query that scores
    // it highest, then rebuild each query from its cluster's top terms.
    for(int i=0;i<10;i++) {
      Map<Integer,Integer> bestClusters = new HashMap<Integer,Integer>();
      Map<Integer,Float> bestClusterScores = new HashMap<Integer,Float>();
      List<Map<Integer,Float>> clusters = new ArrayList<Map<Integer,Float>>();
      for(int j=0;j<queries.size();j++) {
        clusters.add(new HashMap<Integer,Float>());
        VectorCollector vc = new VectorCollector();
        is.search(queries.get(j), vc);
        //System.out.println(vc.getResultsVector());
        for(Integer k : vc.getResultsVector().keySet()) {
          float score = vc.getResultsVector().get(k);
          if(score < 0.001) continue;
          if(!bestClusterScores.containsKey(k) || bestClusterScores.get(k) < score) {
            bestClusters.put(k, j);
            bestClusterScores.put(k, score);
          }
        }
      }
      for(Integer j : bestClusters.keySet()) {
        clusters.get(bestClusters.get(j)).put(j, bestClusterScores.get(j));
      }
      //for(Map<Integer,Float> cluster : clusters) System.out.println(cluster);
      queries.clear();
      for(int j=0;j<clusters.size();j++) {
        System.out.println("Size: " + clusters.get(j).size());
        /*if(i == 9) {
          for(Integer k : clusters.get(j).keySet()) {
            System.out.println(docFiles.get(k) + "\t" + bestClusterScores.get(k));
          }
        }*/
        //if(i == 9) ClusterAnalyser.excessAnalyseCluster(clusters.get(j), lis.getIndexReader(), 0.2, true);
        Map<String,Double> scores = ClusterAnalyser.simpleExcessAnalyseCluster(clusters.get(j), lis.getIndexReader(), 0.1);
        // Rebuild the cluster's query from its 20 highest-scoring terms,
        // boosting each term by its score.
        BooleanQuery bq = new BooleanQuery(false);
        List<String> terms = StringTools.getSortedList(scores);
        if(terms.size() > 20) terms = terms.subList(0, 20);
        for(String s : terms) {
          System.out.println(s + "\t" + scores.get(s));
          TermQuery tq = new TermQuery(new Term("txt", s));
          tq.setBoost(scores.get(s).floatValue());
          bq.add(new BooleanClause(tq, Occur.SHOULD));
        }
        System.out.println();
        queries.add(bq);
      }
      System.out.println();
    }
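    // Final pass: collect the documents matching each refined query (score >= 0.2)
    // and report the clusters from largest to smallest.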
    List<Map<Integer,Float>> clusters = new ArrayList<Map<Integer,Float>>();
    final Map<Integer,Integer> clusterSizes = new HashMap<Integer,Integer>();
    for(int j=0;j<queries.size();j++) {
      VectorCollector vc = new VectorCollector();
      is.search(queries.get(j), vc);
      final Map<Integer,Float> cluster = new HashMap<Integer,Float>();
      //System.out.println(vc.getResultsVector());
      for(Integer k : vc.getResultsVector().keySet()) {
        float score = vc.getResultsVector().get(k);
        if(score < 0.2) continue;
        cluster.put(k, score);
      }
      clusters.add(cluster);
      clusterSizes.put(j, cluster.size());
    }
    List<Integer> clustersBySize = new ArrayList<Integer>(clusterSizes.keySet());
    Collections.sort(clustersBySize, Collections.reverseOrder(new Comparator<Integer>() {
      @SuppressWarnings("unchecked")
      public int compare(Integer o1, Integer o2) {
        return clusterSizes.get(o1).compareTo(clusterSizes.get(o2));
      }
    }));
   
    for(Integer j : clustersBySize) {
      final Map<Integer,Float> cluster = clusters.get(j);
      System.out.println("Size:\t" + cluster.size());
      List<Integer> list = new ArrayList<Integer>(cluster.keySet());
      Collections.sort(list, Collections.reverseOrder(new Comparator<Integer>() {
        @SuppressWarnings("unchecked")
        public int compare(Integer o1, Integer o2) {
          return cluster.get(o1).compareTo(cluster.get(o2));
        }
      }));
      for(Integer k : list) {
        System.out.println(docFiles.get(k) + "\t" + cluster.get(k));
      }
      ClusterAnalyser.excessAnalyseCluster(cluster, lis.getIndexReader(), 0.2, true);
      System.out.println();
    }


  }
  // …


      System.out.println(s + "\t" + tfIdf.get(s));
   
  }
 
  public static Map<String,Double> excessAnalyseCluster(Map<Integer,Float> cluster, IndexReader ir, double threshold, boolean enriched) throws Exception {
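    // Analyses the documents in a cluster, collecting their files, InChIs and
    // ontology IDs, and returns a score for each term.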
    LuceneChemicalIndex lci = new LuceneIndexerSearcher(false).getLci();
    Set<String> inchis = new HashSet<String>();
    Set<String> onts = new HashSet<String>();
   
    List<File> clusterFiles = new ArrayList<File>();
    for(Integer i : cluster.keySet()) {
      // …

    return scores;
  }

 
  public static void analyseCluster(Map<Integer,Float> cluster, IndexReader ir, DocVectorSimilarity similarity, double threshold) throws Exception {
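    // Prints, for each term in the cluster, its score and overlap, using the
    // supplied document-vector similarity.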
    LuceneChemicalIndex lci = new LuceneIndexerSearcher(false).getLci();
    List<File> clusterFiles = new ArrayList<File>();
    Bag<String> dfs = new Bag<String>();
    Set<String> inchis = new HashSet<String>();
    Set<String> onts = new HashSet<String>();
    for(Integer i : cluster.keySet()) {
      // …

      System.out.println(term + "\t" + scores.get(term) + "\t" + overlaps.get(term));
    }
  }

  public static void main(String[] args) throws Exception {
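    // Runs a stemmed term query against the index and prints the
    // excess-analysis score of each term found in the matching documents.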
    LuceneIndexerSearcher lis = new LuceneIndexerSearcher(false);
    IndexSearcher is = lis.getIndexSearcher();
   
    Stemmer stemmerTools = new Stemmer(new EnglishStemmer());
   
    //QueryParser qp = new Oscar3QueryParser("txt", new Oscar3Analyzer(), lis, false);
    //Query q = qp.parse("NaCl");
   
    String queryTerm = "content";
    //PhraseQuery pq = new PhraseQuery();
    //pq.add(new Term("txt", "aromatase"));
    //pq.add(new Term("txt", "inhibitors"));
    //Query q = new TermQuery(new Term("txt", queryTerm));
    Query q = new StemQuery(new Term("txt", queryTerm), stemmerTools);
    //q = pq;
    VectorCollector vc = new VectorCollector();
    is.search(q, vc);
    Map<String,Double> scores = simpleExcessAnalyseCluster(vc.getResultsVector(), lis.getIndexReader(), 0.01);
    for(String s : StringTools.getSortedList(scores)) {
      System.out.println(s + "\t" + scores.get(s));
    }

    //tfIdfAnalyseCluster(vc.getResultsVector(), lis.getIndexReader());
    // …

    //  return;
    //}
   
    ChiSquaredDistribution csd = new ChiSquaredDistributionImpl(1);
    List<Query> lq = new ArrayList<Query>();
    TextMiner tm = new TextMiner(new LuceneIndexerSearcher(false));

    System.out.println(tm.corpusBags.txtBag.totalCount());
    System.out.println(tm.corpusBags.txtBag.getCount("dextromethorphan"));
   
    q = new TermQuery(new Term("txt", "CYP2D6"));
    // …

  /**
   * @param args
   */
  public static void main(String[] args) throws Exception {
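    // Rewrites a fuzzy query against the index to inspect the term clauses
    // it expands to.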
    LuceneIndexerSearcher lis = new LuceneIndexerSearcher(false);
   
    Query q = new FuzzyQuery(new Term("txt", "adrenaline"), 0.1f);
    Query qq = lis.getIndexSearcher().rewrite(q);
   
    if(qq instanceof BooleanQuery) {
      BooleanQuery bq = (BooleanQuery)qq;
      BooleanClause [] clauses = bq.getClauses();
      final HashMap<String,Float> hm = new HashMap<String,Float>();
      // …

  /**
   * @param args
   */
  public static void main(String[] args) throws Exception {
    // Copy the on-disk index into RAM and search the in-memory copy.
    LuceneIndexerSearcher lis = new LuceneIndexerSearcher(false);
    IndexReader ir = lis.getIndexReader();
    Directory dir = new RAMDirectory(ir.directory());
    ir.close();
    IndexSearcher is = new IndexSearcher(dir);
    ir = is.getIndexReader();
    /*LuceneIndexerSearcher lis = new LuceneIndexerSearcher(false);
    // …

      int skip = 0;
     
      if(request.getParameter("size") != null) size = Integer.parseInt(request.getParameter("size"));
      if(request.getParameter("skip") != null) skip = Integer.parseInt(request.getParameter("skip"));
     
      UserQuery uq = new UserQuery(rt, size, skip);
      if(request.getParameter("morelikethis") != null) {
        uq.setToMoreLikeThis(Integer.parseInt(request.getParameter("morelikethis")));
      }
      if(request.getParameter("query") != null) {
        String query = request.getParameter("query");
        String queryType = request.getParameter("type");
        String parameter = request.getParameter("parameter");
       
        uq.addTerm(query, queryType, parameter);
       
        // Collect any additional query2/type2/parameter2, query3/... terms.
        for(int i=2;request.getParameter("query" + Integer.toString(i)) != null;i++) {
          query = request.getParameter("query" + Integer.toString(i));
          queryType = request.getParameter("type" + Integer.toString(i));
          parameter = request.getParameter("parameter" + Integer.toString(i));
          uq.addTerm(query, queryType, parameter);     
        }
       
      }
   
      try {
        // …

        ne.addAttribute(new Attribute("InChI", a));
        ne.appendChild(name);
        li.appendChild(ne);

        Element anchor = new Element("a");
        UserQuery nuq = new UserQuery(uq, UserQuery.ResultsType.SNIPPETS);
        nuq.addTerm(a, "inchi", "");
        anchor.addAttribute(new Attribute("href", nuq.getQueryURL(0, nuq.getSize())));
        anchor.appendChild("search");
       
        li.appendChild(" ");
        li.appendChild(anchor);
      } else if(a.matches("0\\.\\d+ like InChI=.+")) {
        // Similarity result of the form "<score> like InChI=...".
        String [] sp = a.split("\\s+");
        String name = lis.nameForInChI(sp[2]);
        li.appendChild(sp[0] + " like ");
       
        Element ne = new Element("ne");
        ne.addAttribute(new Attribute("type", "CM"));
        ne.addAttribute(new Attribute("InChI", a));
        ne.appendChild(name);
        li.appendChild(ne);
       
      } else if(a.startsWith("ONT: ")) {
        String postFix = a.substring(5);
        String name = OBOOntology.getInstance().getNameForID(postFix);
        Element ne = new Element("ne");
        ne.addAttribute(new Attribute("type", "ONT"));
        ne.addAttribute(new Attribute("ontIDs", postFix));
        ne.appendChild(name);
        li.appendChild(ne);
       
        Element anchor = new Element("a");
        UserQuery nuq = new UserQuery(uq, UserQuery.ResultsType.SNIPPETS);
        nuq.addTerm(postFix, "ontology", "");
        anchor.addAttribute(new Attribute("href", nuq.getQueryURL(0, nuq.getSize())));
        anchor.appendChild("search");
       
        li.appendChild(" ");
        li.appendChild(anchor);       
      } else {
        li.appendChild(a);       

        Element anchor = new Element("a");
        UserQuery nuq = new UserQuery(uq, UserQuery.ResultsType.SNIPPETS);
        nuq.addTerm(a, "word", "strict");
        anchor.addAttribute(new Attribute("href", nuq.getQueryURL(0, nuq.getSize())));
        anchor.appendChild("search");
       
        li.appendChild(" ");
        li.appendChild(anchor);
      }
      // …

    // Build a disjunction over the ontology IDs, provided the number of items
    // fits within Lucene's maximum clause count.
    BooleanQuery bq = new BooleanQuery(true);
    if(queryItems.size() <= BooleanQuery.getMaxClauseCount()) {
      for(String ont : queryItems) {
        bq.add(new BooleanClause(new TermQuery(new Term("Ontology", ont)), Occur.SHOULD));
      }
      VectorCollector vc = new VectorCollector();
      is.search(bq, vc);
      cache.put(ontQ, vc);
      return vc;
    }
    return new VectorCollector();
  }
  // …
