Examples of Wiki


Examples of com.ibm.sbt.services.client.connections.wikis.Wiki

      long start = System.currentTimeMillis();
       
        HashMap<String, String> params = new HashMap<String, String>();
        params.put("includeTags", "true");
        params.put("ps", "20");
        Wiki wiki = wikiService.getWiki(wikiLabel, params);
        JsonJavaObject result = entityToJsonObject(wiki, WikiXPath.values(),
                null);
       
        JsonJavaArray pagesArray = new JsonJavaArray();
        int page = 1;
View Full Code Here

Examples of edu.scripps.mwsync.Wiki

  static final String TEST_ROOT = "/etc/gwsync/tests/";

  @BeforeClass
  public static void setUp() throws Exception {
    source = null;
    target = new Wiki("genewikiplus.org", "");
    target.setUsingCompressedRequests(false);
    mockTarget = new MockWiki("genewikiplus.org", "");
    mockTarget.setUsingCompressedRequests(false);
    period = 5;
View Full Code Here

Examples of edu.scripps.mwsync.Wiki

public class TestBugs {

  @Test
  public void testNoAnnotationsBug() throws IOException {
    Wiki w = new Wiki("genewikiplus.org", "");
    Wiki mock = new MockWiki("genewikiplus.org", "");
    mock.setUsingCompressedRequests(false);
    w.setUsingCompressedRequests(false);
    String dmt1 = w.getPageText("DMT1");
    GeneWikiEditor e = new GeneWikiEditor("/etc/gwsync/", "10058c23-e837-40e8-8e49-9bd52f94c283", "file:/etc/gwsync/doid.owl");
    String result = GeneWikiEditor.appendDetachedAnnotations(dmt1, mock);
    System.out.println(result);
View Full Code Here

Examples of gannuNLP.dictionaries.Wiki

  {   
    super(dict,name);
    this.dict=dict;
    File tmp=new File("./data/matrices/"+dict.getCompleteName().replace(">", "@@@@@")+"/");
    tmp.mkdirs();
    Wiki wiki=(Wiki)this.dict;
    for(Input doc:targets)
    {
      System.out.println("Calculating matrices for "+doc.toString());
      for(AmbiguousWord word:doc.getAmbiguousWords())
      {       
        if(word.getCurrentLemmaObject()!=null&&word.getSenses().size()>0&&word.getSenses().size()<150)
        {
          System.out.print(".");
          File fout=new File("./data/matrices/"+dict.getCompleteName().replace(">", "@@@@@")+"/"+Dictionary.normalizeLemmaforFile(word.getLemma())+".gmx");;
          if(!fout.exists())
          {           
            SmallPair p=new SmallPair(word.getLemmaObject());           
            for(int i=0;i<p.getLemma().getSenses().size();i++)
            {
              p.getCounts()[i][i]=(int)wiki.getCounts(p.getLemma().getSenses().get(i),p.getLemma().getSenses().get(i));
              for(int j=0;j<p.getLemma().getSenses().size();j++)
              {
                if(p.getCounts()[i][i]<(2*p.getLemma().getSenses().size()))
                {
                  if(p.getCounts()[i][i]==0)
                  {
                    p.getCounts()[i][j]=1;
                    p.getCounts()[j][i]=1
                  }
                  else
                  {
                    p.getCounts()[i][j]=p.getCounts()[i][i];
                    p.getCounts()[j][i]=p.getCounts()[i][i];
                  }                 
                }
                else
                {
                  if(j>i)
                  {
                    p.getCounts()[i][j]=0;
                  }
                }               
              }
            }
         
            for(int i=0;i<p.getLemma().getSenses().size();i++)
            {
              for(int j=i+1;j<p.getLemma().getSenses().size();j++)
              {
                if(p.getCounts()[i][i]>=(2*p.getLemma().getSenses().size())&&p.getCounts()[j][j]>=(2*p.getLemma().getSenses().size()))
                {
                  p.getCounts()[i][j]=(int)wiki.getCounts(p.getLemma().getSenses().get(i),p.getLemma().getSenses().get(j));
                  p.getCounts()[j][i]=p.getCounts()[i][j];
                  if(p.getCounts()[j][i]>=(2*p.getLemma().getSenses().size()))
                  {
                    i=p.getLemma().getSenses().size();
                    break;
 
View Full Code Here

Examples of gannuNLP.dictionaries.Wiki

   *  This method removes undesired senses from a lemma extracted from Wikipedia.
   *  @param lemma The target lemma.
   */
  public void modifyBow(Lemma lemma) throws Exception {
    ArrayList<Sense> removeList=new ArrayList<Sense>();
    Wiki w=(Wiki)((DataBroker)this.dict).getSource();
    int i=0;
   
    for(Sense s:lemma.getSenses())
    {
      if(w.isNotAnArticle(s.getSid()))
      {
        removeList.add(s);
      }
      i++;
    }
    for(Sense s:removeList)
    {
      lemma.getSenses().remove(s);
    }
    for(Sense s:lemma.getSenses())
    {
      for(ArrayList<Relation> list: s.getRelations().values())
      {
       
        for(int j=0;j<list.size();j++)
        {
          if(w.isNotAnArticle(list.get(j).getSid()))
          {
            list.remove(j);
            j--;
          }
       
View Full Code Here

Examples of gannuNLP.dictionaries.Wiki

  /**
   * @param args
   */
  public static void main(String[] args)throws Exception{
    Wiki w=new Wiki();
    w.setVersion(args[0]);
    w.setPath(args[1]);
    w.load("all");
    File d=new File(args[3]);
    d.mkdirs();
    w.createInputFromArticle(args[2], args[3]);
  }
View Full Code Here

Examples of org.olat.modules.wiki.Wiki

   *      java.lang.String, boolean, java.lang.Object)
   */
  public Topic lookupTopic(String virtualWiki, String topicName, boolean deleteOK, Object transactionObject) throws Exception {
    String decodedName = null;

    // Load (or fetch from cache) the wiki addressed by this handler's resource.
    Wiki wiki = WikiManager.getInstance().getOrLoadWiki(ores);
    try {
      decodedName = URLDecoder.decode(topicName, "utf-8");
    } catch (UnsupportedEncodingException e) {
      // UTF-8 is mandatory on every JVM, so this branch is effectively
      // unreachable. NOTE(review): were it ever taken, decodedName would stay
      // null and the startsWith() calls below would throw a NullPointerException.
      //
    }
    // Debug-only tracing of the raw, normalized, and decoded name variants.
    if (Tracing.isDebugEnabled(OlatWikiDataHandler.class)) {
      Tracing.logDebug("page name not normalized: " + topicName, OlatWikiDataHandler.class);
      Tracing.logDebug("page name normalized: " + FilterUtil.normalizeWikiLink(topicName), OlatWikiDataHandler.class);
      try {
        Tracing.logDebug("page name urldecoded name: " + URLDecoder.decode(topicName, "utf-8"), OlatWikiDataHandler.class);
        Tracing.logDebug("page name urldecoded and normalized: " + FilterUtil.normalizeWikiLink(URLDecoder.decode(topicName, "utf-8")),
            OlatWikiDataHandler.class);
        Tracing.logDebug("page name urldecoded normalized and transformed to id: "
            + wiki.generatePageId(FilterUtil.normalizeWikiLink(decodedName)), OlatWikiDataHandler.class);
      } catch (UnsupportedEncodingException e) {
        // Same reasoning as above: UTF-8 decoding cannot fail in practice.
        //
      }
    }
    Topic topic = new Topic();
    // Image namespace: return an image-typed topic if the media file exists.
    // NOTE(review): the namespace check uses decodedName but the substring is
    // taken from the raw topicName — for names containing percent-escapes the
    // two differ; confirm this asymmetry is intended.
    if (decodedName.startsWith(IMAGE_NAMESPACE)) {
      String imageName = topicName.substring(IMAGE_NAMESPACE.length());
      if (!wiki.mediaFileExists(imageName)) return null;
      topic.setName(imageName);
      topic.setTopicType(Topic.TYPE_IMAGE);
      return topic;
    } else if (decodedName.startsWith(MEDIA_NAMESPACE)) {
      // Media namespace: same pattern as images, but typed as a plain file.
      String mediaName = topicName.substring(MEDIA_NAMESPACE.length(), topicName.length());
      if (!wiki.mediaFileExists(mediaName)) return null;
      topic.setName(mediaName);
      topic.setTopicType(Topic.TYPE_FILE);
      return topic;
    }
    // Regular page: it exists iff the normalized, decoded name maps to a known page id.
    if (wiki.pageExists(wiki.generatePageId(FilterUtil.normalizeWikiLink(decodedName)))) {
      topic.setName(topicName);
      return topic;
    }
    // Neither an image, a media file, nor a page matched the requested name.
    return null;
  }
View Full Code Here

Examples of org.olat.modules.wiki.Wiki

//      Utilities.validateTopicName(topic);
//    } catch (WikiException e) {
//      throw new OLATRuntimeException(this.getClass(), "invalid topic name!", e);
//    }
   
    Wiki wiki = WikiManager.getInstance().getOrLoadWiki(ores);
    if (topic.startsWith(IMAGE_NAMESPACE) || topic.startsWith(MEDIA_NAMESPACE)) {
      return wiki.pageExists(topic);
    }
    String pageId = WikiManager.generatePageId(FilterUtil.normalizeWikiLink(topic));
    return wiki.pageExists(pageId);
  }
View Full Code Here

Examples of org.olat.modules.wiki.Wiki

    // Index Group Wiki
    if (log.isDebug()) log.debug("Analyse Wiki for Group=" + businessGroup);
    CollaborationTools collabTools = CollaborationToolsFactory.getInstance().getOrCreateCollaborationTools(businessGroup);
    if (collabTools.isToolEnabled(CollaborationTools.TOOL_WIKI) ) {
      try {
        Wiki wiki = WikiManager.getInstance().getOrLoadWiki(businessGroup);
          // loop over all wiki pages
          List<WikiPage> wikiPageList = wiki.getAllPagesWithContent();
          for (WikiPage wikiPage : wikiPageList) {
            SearchResourceContext wikiResourceContext = new SearchResourceContext(parentResourceContext);
            wikiResourceContext.setBusinessControlFor(BusinessGroupMainRunController.ORES_TOOLWIKI);
            wikiResourceContext.setDocumentType(TYPE);
            wikiResourceContext.setDocumentContext(businessGroup.getKey() + " ");
View Full Code Here

Examples of org.olat.modules.wiki.Wiki

    if (log.isDebug()) log.debug("Index wiki...");
    String repoEntryName = "*name not available*";
    try {
      RepositoryEntry repositoryEntry = courseNode.getReferencedRepositoryEntry();
      repoEntryName = repositoryEntry.getDisplayname();
      Wiki wiki = WikiManager.getInstance().getOrLoadWiki(courseNode.getReferencedRepositoryEntry().getOlatResource());
      // loop over all wiki pages
      List<WikiPage> wikiPageList = wiki.getAllPagesWithContent();
      for (WikiPage wikiPage : wikiPageList) {
        try {
          SearchResourceContext courseNodeResourceContext = new SearchResourceContext(repositoryResourceContext);
          courseNodeResourceContext.setBusinessControlFor(courseNode);
          courseNodeResourceContext.setDocumentType(TYPE);
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.