Package org.wikipediacleaner.api

Examples of org.wikipediacleaner.api.API


   */
  @Override
  public Object construct() {
    try {
      setText(GT._("Retrieving MediaWiki API"));
      API api = APIFactory.getAPI();
      setText(GT._("Getting random page"));
      List<Page> pages = api.getRandomPages(getWikipedia(), 1, false);
      if (pages.size() > 0) {
        title = pages.get(0).getTitle();
      } else {
        title = "";
      }
View Full Code Here


        GT._("Do you want to delete this page on Wikipedia ?\nPlease, enter the reason for deleting the page"),
        "", null);
    if ((reason == null) || (reason.trim().length() == 0)) {
      return;
    }
    API api = APIFactory.getAPI();
    try {
      EnumWikipedia wiki = page.getWikipedia();
      api.deletePage(
          wiki, page, wiki.formatComment(reason.trim(), false));
      if (listener != null) {
        listener.pageDeleted(page.getTitle());
      }
    } catch (APIException ex) {
View Full Code Here

   * @param pageName Page name.
   * @return Language link.
   * @throws APIException
   */
  private String getLanguageLink(String pageName) throws APIException {
    API api = APIFactory.getAPI();
    String link = api.getLanguageLink(from, getWikipedia(), pageName);
    if (link != null) {
      return link;
    }
    Page original = DataManager.getPage(from, pageName, null, null, null);
    //api.retrieveLinksWithRedirects(from, original, null, null);
    api.initializeRedirect(from, Collections.singletonList(original));
    if (!original.isRedirect()) {
      return link;
    }
    api.retrieveContents(from, Collections.singletonList(original), false, true);
    link = api.getLanguageLink(from, getWikipedia(), original.getRedirectTitle());
    if (link == null) {
      return null;
    }
    String destination = original.getRedirectDestination();
    int anchorPos = destination.indexOf('#');
View Full Code Here

            }
          }
        }
      }

      API api = APIFactory.getAPI();
      Page category = DataManager.getPage(toWikipedia, "Category:" + title, null, null, null);
      api.retrieveContents(toWikipedia, Collections.singletonList(category), false, false);
      if (category.isExisting() == null) {
        Utilities.displayWarning(
            textPane.getParent(),
            GT._(
                "Unable to find if category {0} exists in \"{1}\".",
                new Object[] { title, toWikipedia.toString() }));
        return;
      }
      if (Boolean.TRUE.equals(category.isExisting())) {
        String replace = categoryName + ":" + title + ((order != null) ? "|" + order : "");
        int answer = Utilities.displayYesNoWarning(
            textPane.getParent(),
            GT._(
                "The category {0} exists in \"{1}\".\n" +
                "Do you want to replace the category by [[{2}]] ?",
                new Object[] { title, toWikipedia.toString(), replace }));
        if (answer == JOptionPane.YES_OPTION) {
          int startOffset = element.getStartOffset();
          int endOffset = element.getEndOffset();
          try {
            textPane.getDocument().remove(startOffset, endOffset - startOffset);
            textPane.getDocument().insertString(startOffset, "[[" + replace + "]]", element.getAttributes());
            textPane.setCaretPosition(startOffset);
            textPane.moveCaretPosition(startOffset + replace.length());
          } catch (BadLocationException e1) {
            // Nothing to be done
          }
        }
        return;
      }
      String languageLink = api.getLanguageLink(fromWikipedia, toWikipedia, "Category:" + title);
      if (languageLink == null) {
        Utilities.displayInformationMessage(
            textPane.getParent(),
            GT._(
                "The category {0} in the {1} Wikipedia doesn''t have a language link to the {2} Wikipedia.\n" +
View Full Code Here

   *
   * @param pages List of pages members of the categories.
   * @throws APIException
   */
  private void constructCategoryMembers(List<Page> pages) throws APIException {
    final API api = APIFactory.getAPI();
    for (String pageName : elementNames) {
      Page page = DataManager.getPage(getWikipedia(), pageName, null, null, null);
      api.retrieveCategoryMembers(getWikipedia(), page, 0, true);
      List<Page> tmpPages = page.getRelatedPages(Page.RelatedPages.CATEGORY_MEMBERS);
      if (tmpPages != null) {
        for (Page tmpPage : tmpPages) {
          if (!pages.contains(tmpPage)) {
            pages.add(tmpPage);
View Full Code Here

   *
   * @param pages List of articles members of the categories.
   * @throws APIException
   */
  private void constructCategoryMembersArticles(List<Page> pages) throws APIException {
    final API api = APIFactory.getAPI();
    for (String pageName : elementNames) {
      Page page = DataManager.getPage(getWikipedia(), pageName, null, null, null);
      api.retrieveCategoryMembers(getWikipedia(), page, 0, true);
      List<Page> tmpPages = page.getRelatedPages(Page.RelatedPages.CATEGORY_MEMBERS);
      if (tmpPages != null) {
        WPCConfiguration configuration = getWikipedia().getConfiguration();
        for (Page tmpPage : tmpPages) {
          if (!tmpPage.isArticle()) {
View Full Code Here

   *
   * @param pages List of pages with missing templates.
   * @throws APIException
   */
  private void constructMissingTemplates(List<Page> pages) throws APIException {
    final API api = APIFactory.getAPI();
    EnumWikipedia wiki = getWikipedia();
    setText(GT._("Retrieving list of missing templates"));
    List<Page> tmpPages = api.getQueryPages(wiki, EnumQueryPage.WANTED_TEMPLATES);
    if (tmpPages == null) {
      return;
    }
    setText(GT._("Checking that the templates are still missing"));
    api.retrieveInfo(wiki, tmpPages);
    List<Page> tmpPages2 = new ArrayList<Page>();
    for (Page tmpPage : tmpPages) {
      Boolean exists = tmpPage.isExisting();
      if (!Boolean.TRUE.equals(exists)) {
        tmpPages2.add(tmpPage);
View Full Code Here

   *
   * @param pages List of protected titles with backlinks.
   * @throws APIException
   */
  private void constructProtectedTitles(List<Page> pages) throws APIException {
    final API api = APIFactory.getAPI();
    EnumWikipedia wiki = getWikipedia();
    setText(GT._("Retrieving list of protected titles"));
    List<Page> tmpPages = api.getProtectedTitles(
        wiki, Collections.singletonList(Namespace.MAIN), false);
    if ((tmpPages == null) || (tmpPages.isEmpty())) {
      return;
    }
    setText(GT._("Checking that protected titles have backlinks"));
View Full Code Here

   *
   * @param pages List of pages.
   * @throws APIException
   */
  private void constructQueryPage(List<Page> pages) throws APIException {
    final API api = APIFactory.getAPI();
    EnumWikipedia wiki = getWikipedia();
    EnumQueryPage query = EnumQueryPage.findByCode(elementNames.get(0));
    List<Page> tmpPages = api.getQueryPages(wiki, query);
    if (tmpPages != null) {
      pages.addAll(tmpPages);
    }
  }
View Full Code Here

   * @param pages List of search results.
   * @throws APIException
   */
  private void constructSearchTitles(List<Page> pages) throws APIException {
    if (elementNames != null) {
      final API api = APIFactory.getAPI();
      for (String pageName : elementNames) {
        Page page = DataManager.getPage(getWikipedia(), pageName, null, null, null);
        api.retrieveSimilarPages(getWikipedia(), page, true);
        pages.addAll(page.getRelatedPages(Page.RelatedPages.SIMILAR_PAGES));
      }
    }
  }
View Full Code Here

TOP

Related Classes of org.wikipediacleaner.api.API

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact: coftware@gmail.com.