Examples of org.wikipediacleaner.api.API


Examples of org.wikipediacleaner.api.API

   * @see org.wikipediacleaner.gui.swing.basic.BasicWindow#dispose()
   */
  @Override
  public void dispose() {
    API api = APIFactory.getAPI();
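    // Stop listening to recent changes on this wiki before the window is disposed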
    api.removeRecentChangesListener(getWikipedia(), this);
    super.dispose();
  }
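
All of these snippets share the same access pattern: the API instance is obtained from APIFactory.getAPI() rather than constructed directly, and calls are wrapped in a try/catch for APIException. Below is a minimal, self-contained sketch of that pattern; the import paths for EnumWikipedia and Page are assumed from the usual WPCleaner package layout, and the purgePageCache call mirrors the snippet further down this page.

import org.wikipediacleaner.api.API;
import org.wikipediacleaner.api.APIException;
import org.wikipediacleaner.api.APIFactory;
import org.wikipediacleaner.api.constants.EnumWikipedia;
import org.wikipediacleaner.api.data.Page;

public class ApiUsageSketch {

  /** Purge the server-side cache of one page, treating failures as non-fatal. */
  public static void purgeQuietly(EnumWikipedia wiki, Page page) {
    // The API object is a shared instance managed by APIFactory, not constructed here
    API api = APIFactory.getAPI();
    try {
      // Every MediaWiki request can fail; failures are reported as APIException
      api.purgePageCache(wiki, page);
    } catch (APIException e) {
      // Purging is best effort in this sketch, so the failure is ignored
    }
  }
}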

Examples of org.wikipediacleaner.api.API

     */
    @Override
    public Object construct() {
      try {
        setText(GT._("Retrieving MediaWiki API"));
        API api = APIFactory.getAPI();
        setText(GT._("Retrieving templates"));
        api.retrieveTemplates(getWikipedia(), page1);
        setText(GT._("Retrieving links in templates"));
        api.retrieveLinks(getWikipedia(), page1.getTemplates());
        setText(GT._("Displaying templates found"));
        for (Page p : page1.getTemplates()) {
          boolean found = false;
          for (Page l : p.getLinks()) {
            if (link1.getTitle().equals(l.getTitle())) {

Examples of org.wikipediacleaner.api.API

   */
  @Override
  public String fix(String fixName, PageAnalysis analysis, MWPane textPane) {

    // Initialize
    API api = APIFactory.getAPI();
    StringBuilder tmpContents = new StringBuilder();
    int currentIndex = 0;

    // Manage templates that can be used to replace a link to another language
    List<String> templatesList = getTemplatesList();
    String[] templateArgs = null;
    if ((templatesList != null) && (templatesList.size() > 0)) {
      String[] tmp = templatesList.get(0).split("\\|");
      if (tmp.length >= 5) {
        templateArgs = tmp;
      }
    }

    // Check all internal links
    Object highlight = null;
    String contents = analysis.getContents();
    try {
      EnumWikipedia toWiki = analysis.getWikipedia();
      for (PageElementInterwikiLink link : analysis.getInterwikiLinks()) {
        if (isLanguageLink(link, toWiki)) {
          String lgCode = link.getInterwiki().getPrefix();
          EnumWikipedia fromWiki = EnumWikipedia.getWikipedia(lgCode);
          if ((fromWiki != null) && (fromWiki.getSettings().getCode().equals(lgCode))) {
            String pageTitle = link.getLink();
            int beginIndex = link.getBeginIndex();
            int endIndex = link.getEndIndex();
            String replacement = null;

            // Display selection
            highlight = addHighlight(textPane, beginIndex, endIndex);
            textPane.select(beginIndex, endIndex);

            // Check for language link
            String toTitle = api.getLanguageLink(fromWiki, toWiki, pageTitle);
            if (toTitle != null) {

              // List possible replacements
              List<String> possibleValues = new ArrayList<String>();
              String possible = null;

Examples of org.wikipediacleaner.api.API

    long startTime = System.currentTimeMillis();
    EnumWikipedia wikipedia = getWikipedia();
    WPCConfiguration configuration = wikipedia.getConfiguration();

    setText(GT._("Retrieving MediaWiki API"));
    API api = APIFactory.getAPI();
    int lastCount = 0;
    WikiConfiguration wikiConfiguration = wikipedia.getWikiConfiguration();

    Stats stats = new Stats();
    try {
      if (!useList) {
        // Retrieve talk pages including a warning
        String warningTemplateName = configuration.getString(WPCConfigurationString.DAB_WARNING_TEMPLATE);
        setText(GT._("Retrieving talk pages including {0}", "{{" + warningTemplateName + "}}"));
        String templateTitle = wikiConfiguration.getPageTitle(
            Namespace.TEMPLATE,
            warningTemplateName);
        Page warningTemplate = DataManager.getPage(
            wikipedia, templateTitle, null, null, null);
        api.retrieveEmbeddedIn(
            wikipedia, warningTemplate,
            configuration.getEncyclopedicTalkNamespaces(),
            false);
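        // The pages found are attached to the template and read back through getRelatedPages()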
        List<Page> warningTalkPages = warningTemplate.getRelatedPages(Page.RelatedPages.EMBEDDED_IN);
 

Examples of org.wikipediacleaner.api.API

    try {
      if (progressPanel != null) {
        progressPanel.start();
        progressPanel.setText(GT._("Retrieving MediaWiki API"));
      }
      API api = APIFactory.getAPI();
      if (progressPanel != null) {
        progressPanel.setText(GT._("Purging page cache"));
      }
      api.purgePageCache(wikipedia, page);
    } catch (APIException ex) {
      //
    } finally {
      if (progressPanel != null) {
        progressPanel.stop();

Examples of org.wikipediacleaner.api.API

   */
  @Override
  public Object construct() {
    try {
      setText(GT._("Retrieving MediaWiki API"));
      API api = APIFactory.getAPI();
      EnumWikipedia wiki = getWikipedia();

      // Login
      if (!reloadOnly) {
        setText(GT._("Login"));
        LoginResult result = api.login(wiki, username, new String(password), login);
        if (login) {
          if ((result == null) || (!result.isLoginSuccessful())) {
            throw new APIException("Login unsuccessful: " + ((result != null) ? result.toString() : ""));
          }
        }
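        // Retrieve full user information and keep the user name as known by the wiki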
        User user = api.retrieveUser(wiki, username);
        username = (user != null) ? user.getName() : null;
        wiki.getConnection().setUser(user);
        api.retrieveTokens(wiki);
        logged = true;
      }

      // Load configuration
      setText(GT._("Loading configuration"));
      api.loadConfiguration(wiki, username);

      // Saving settings
      Configuration configuration = Configuration.getConfiguration();
      configuration.setWikipedia(wiki);
      configuration.setLanguage(language);

Examples of org.wikipediacleaner.api.API

    try {
      if (progressPanel != null) {
        progressPanel.start();
        progressPanel.setText(GT._("Retrieving MediaWiki API"));
      }
      API api = APIFactory.getAPI();
      if (progressPanel != null) {
        progressPanel.setText(GT._("Retrieving members of a category"));
      }
      api.retrieveCategoryMembers(wiki, category, 0, true);
    } catch (APIException ex) {
      //
    } finally {
      if (progressPanel != null) {
        progressPanel.stop();

Examples of org.wikipediacleaner.api.API

      String prefix) throws APIException {

    setText(prefix + " - " + GT._("Analyzing page {0}", page.getTitle()));

    // Retrieve page content
    API api = APIFactory.getAPI();
    api.retrieveContents(getWikipedia(), Collections.singletonList(page), true, false);
    PageAnalysis analysis = page.getAnalysis(page.getContents(), true);

    // Check that robots are authorized to change this page
    if (saveModifications) {
      WPCConfiguration config = getWikipedia().getConfiguration();
      List<String[]> nobotTemplates = config.getStringArrayList(
          WPCConfigurationStringList.NOBOT_TEMPLATES);
      if ((nobotTemplates != null) && (!nobotTemplates.isEmpty())) {
        for (String[] nobotTemplate : nobotTemplates) {
          String templateName = nobotTemplate[0];
          List<PageElementTemplate> templates = analysis.getTemplates(templateName);
          if ((templates != null) && (!templates.isEmpty())) {
            if (analyzeNonFixed) {
              Controller.runFullAnalysis(page.getTitle(), null, getWikipedia());
            }
            return;
          }
        }
      }
    }

    // Analyze page to check if error has been found
    List<CheckErrorPage> errorPages = CheckError.analyzeErrors(allAlgorithms, analysis, true);
    boolean found = false;
    if (errorPages != null) {
      for (CheckErrorPage errorPage : errorPages) {
        if (algorithm.equals(errorPage.getAlgorithm()) &&
            errorPage.getErrorFound()) {
          found = true;
        }
      }
    }

    CheckWiki checkWiki = APIFactory.getCheckWiki();
    if (found) {

      // Fix all errors that can be fixed
      String newContents = page.getContents();
      List<CheckErrorAlgorithm> usedAlgorithms = new ArrayList<CheckErrorAlgorithm>();
      newContents = AutomaticFormatter.tidyArticle(page, newContents, allAlgorithms, true, usedAlgorithms);
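      // usedAlgorithms now lists the algorithms that actually modified the text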

      // Save page if errors have been fixed
      if ((!newContents.equals(page.getContents())) &&
          (!usedAlgorithms.isEmpty())) {
        if (!saveModifications) {
          return;
        }
        StringBuilder comment = new StringBuilder();
        if ((extraComment != null) && (extraComment.trim().length() > 0)) {
          comment.append(extraComment.trim());
          comment.append(" - ");
        }
        comment.append(getWikipedia().getCWConfiguration().getComment(usedAlgorithms));
        setText(prefix + " - " + GT._("Fixing page {0}", page.getTitle()));
        api.updatePage(
            getWikipedia(), page, newContents,
            getWikipedia().createUpdatePageComment(comment.toString(), null, true),
            false);
        countModified++;
        for (CheckErrorAlgorithm usedAlgorithm : usedAlgorithms) {

Examples of org.wikipediacleaner.api.API

   *
   * @param pages List of all disambiguation pages.
   * @throws APIException
   */
  private void constructAllDab(List<Page> pages) throws APIException {
    final API api = APIFactory.getAPI();
    EnumWikipedia wiki = getWikipedia();
    List<Page> tmpPages = wiki.constuctDisambiguationPages(api);
    if (tmpPages != null) {
      pages.addAll(tmpPages);
    }

Examples of org.wikipediacleaner.api.API

   *
   * @param pages List of backlinks.
   * @throws APIException
   */
  private void constructBackLinks(List<Page> pages) throws APIException {
    final API api = APIFactory.getAPI();
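    // Collect the backlinks of every listed page, avoiding duplicates in the result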
    for (String pageName : elementNames) {
      Page page = DataManager.getPage(getWikipedia(), pageName, null, null, null);
      api.retrieveBackLinks(getWikipedia(), page, true);
      List<Page> tmpPages = page.getRelatedPages(Page.RelatedPages.BACKLINKS);
      if (tmpPages != null) {
        for (Page tmpPage : tmpPages) {
          if (!pages.contains(tmpPage)) {
            pages.add(tmpPage);