Package org.wikipediacleaner.api

Examples of org.wikipediacleaner.api.APIException
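The snippets below, taken from WikipediaCleaner's MediaWiki API layer, show the two roles APIException plays: wrapping JDOM parsing failures (JDOMException, JDOMParseException) behind a single checked exception, and carrying API-level error information that callers can inspect through getQueryResult() and getHttpStatus(). A minimal caller sketch, assuming an illustrative `api` instance and an already-loaded `page` (the names are placeholders, not the project's exact entry points):

      try {
        api.retrieveContents(wikipedia, Collections.singletonList(page), false, false);
      } catch (APIException e) {
        if (e.getQueryResult() == EnumQueryResult.BAD_TOKEN) {
          // Token expired: refresh tokens and retry, as updatePage() does below.
        } else if (e.getHttpStatus() == HttpStatus.SC_GATEWAY_TIMEOUT) {
          // The edit may still have gone through; re-read the page to check.
        } else {
          throw e;
        }
      }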


      // Excerpt: extract the expanded wikitext from an
      // action=expandtemplates response.
      XPath xpaContents = XPath.newInstance("/api/expandtemplates/.");
      Element root = getRoot(properties, ApiRequest.MAX_ATTEMPTS);
      return xpaContents.valueOf(root);
    } catch (JDOMException e) {
      log.error("Error expanding templates", e);
      throw new APIException("Error parsing XML", e);
    }
  }


      // Excerpt: report whether a langlinks query should be continued.
      return shouldContinue(
          root, "/api/query-continue/langlinks",
          properties);
    } catch (JDOMException e) {
      log.error("Error updating disambiguation status", e);
      throw new APIException("Error parsing XML", e);
    }
  }
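shouldContinue() drives MediaWiki's query-continue protocol: as long as the response carries a /api/query-continue/langlinks node, the query must be re-issued to fetch the next batch. A hedged sketch of the loop a caller presumably builds around it (the processing step is illustrative):

      boolean more = true;
      while (more) {
        Element root = getRoot(properties, ApiRequest.MAX_ATTEMPTS);
        // ... read the langlinks of this batch from `root` ...
        more = shouldContinue(
            root, "/api/query-continue/langlinks", properties);
      }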

          // Excerpt: parse the retrieved content into `page` via
          // constructContents(Page, Element, String), defined further down.
          page,
          getRoot(wikipedia, properties, ApiRequest.MAX_ATTEMPTS),
          "/api/query/pages/page");
    } catch (JDOMParseException e) {
      log.error("Error retrieving page content", e);
      throw new APIException("Error parsing XML", e);
    } catch (APIException e) {
      switch (e.getQueryResult()) {
      case RV_NO_SUCH_SECTION:
        // API Bug https://bugzilla.wikimedia.org/show_bug.cgi?id=26627
        page.setExisting(Boolean.FALSE);
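The excerpt stops inside the switch; a hedged reconstruction of how such a handler plausibly ends, given the bug it works around (the buggy "no such section" answer is treated as a missing page and swallowed; anything else is rethrown):

      } catch (APIException e) {
        switch (e.getQueryResult()) {
        case RV_NO_SUCH_SECTION:
          // Bug 26627: the API answers "no such section" for a missing
          // page, so record the page as non-existent instead of failing.
          page.setExisting(Boolean.FALSE);
          break;
        default:
          throw e;
        }
      }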

            // Excerpt: same pattern for a batch of pages, via
            // constructContents(List<Page>, Element, String) below.
            pages,
            getRoot(wikipedia, properties, ApiRequest.MAX_ATTEMPTS),
            "/api/query/pages/page");
      } catch (JDOMParseException e) {
        log.error("Error retrieving redirects", e);
        throw new APIException("Error parsing XML", e);
      }
    }
  }

  /**
   * Update the contents of a page (action=edit).
   * 
   * @param wikipedia Wiki on which the edit is made.
   * @param page Page to update; its timestamps guard against edit conflicts.
   * @param newContents New wikitext for the page.
   * @param comment Edit summary.
   * @param forceWatch True to add the page to the watch list.
   * @return Result of the edit.
   * @throws APIException If the edit fails or the response can't be parsed.
   */
  public QueryResult updatePage(
      EnumWikipedia wikipedia, Page page,
      String newContents, String comment,
      boolean forceWatch) throws APIException {
    if (page == null) {
      throw new APIException("Page is null");
    }
    if (newContents == null) {
      throw new APIException("Contents is null");
    }
    if (comment == null) {
      throw new APIException("Comment is null");
    }
    if (wikipedia.getConnection().getLgToken() == null) {
      throw new APIException("You must be logged in to update pages");
    }
    int attemptNumber = 0;
    QueryResult result = null;
    do {
      attemptNumber++;
      Map<String, String> properties = getProperties(ApiRequest.ACTION_EDIT, true);
      properties.put("assert", "user");
      if (page.getContentsTimestamp() != null) {
        properties.put("basetimestamp", page.getContentsTimestamp());
      }
      properties.put("bot", "");
      properties.put("minor", "");
      if (page.getStartTimestamp() != null) {
        properties.put("starttimestamp", page.getStartTimestamp());
      }
      properties.put("summary", comment);
      properties.put("text", newContents);
      properties.put("title", page.getTitle());
      if (wikipedia.getConnection().getEditToken() != null) {
        properties.put("token", wikipedia.getConnection().getEditToken());
      }
      properties.put("watchlist", forceWatch ? "watch" : "nochange");
      checkTimeForEdit(wikipedia.getConnection().getUser(), page.getNamespace());
      try {
        boolean hasCaptcha = false;
        do {
          hasCaptcha = false;
          try {
            result = constructEdit(
                getRoot(wikipedia, properties, 1),
                "/api/edit");
          } catch (CaptchaException e) {
            String captchaAnswer = getCaptchaAnswer(wikipedia, e);
            if (captchaAnswer != null) {
              properties.put("captchaid", e.getId());
              properties.put("captchaword", captchaAnswer);
              hasCaptcha = true;
            } else {
              throw new APIException("CAPTCHA", e);
            }
          }
        } while (hasCaptcha);
      } catch (APIException e) {
        if (e.getHttpStatus() == HttpStatus.SC_GATEWAY_TIMEOUT) {
          log.warn("Gateway timeout, waiting to see if modification has been taken into account");
          waitBeforeRetrying();
          Page tmpPage = page.replicatePage();
          retrieveContents(wikipedia, Collections.singletonList(tmpPage), false, false);
          String tmpContents = tmpPage.getContents();
          if ((tmpContents != null) &&
              (tmpContents.equals(newContents))) {
            return QueryResult.createCorrectQuery(
                tmpPage.getPageId(), tmpPage.getTitle(),
                page.getPageId(), tmpPage.getPageId());
          }
        }
        if (attemptNumber > 1) {
          throw e;
        }
        if (e.getQueryResult() == EnumQueryResult.BAD_TOKEN) {
          waitBeforeRetrying();
          log.warn("Retrieving tokens after a BAD_TOKEN answer");
          retrieveTokens(wikipedia);
        }
      } catch (JDOMParseException e) {
        log.error("Error updating page: " + e.getMessage());
        throw new APIException("Error parsing XML", e);
      }
    } while (result == null);
    return result;
  }
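A hedged caller sketch for updatePage(), assuming `api` is an instance of this class and `page` was populated by an earlier retrieveContents() call, so that its contents timestamp feeds the "basetimestamp" property above (MediaWiki's edit-conflict guard):

      String newText = page.getContents() + "\n[[Category:Example]]";
      QueryResult result = api.updatePage(
          wikipedia, page, newText,
          "Add category",   // edit summary
          false);           // watchlist stays unchanged ("nochange")

BAD_TOKEN recovery and the gateway-timeout check are handled inside the method itself, so the caller only sees an APIException once a retry has also failed.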

  /**
   * Update one section of a page (action=edit with a section index).
   * 
   * @param wikipedia Wiki on which the edit is made.
   * @param page Page to update.
   * @param title Section title, also used as the edit summary.
   * @param section Section index, as the API expects it.
   * @param contents New wikitext for the section.
   * @param forceWatch True to add the page to the watch list.
   * @return Result of the edit.
   * @throws APIException If the edit fails or the response can't be parsed.
   */
  private QueryResult updateSection(
      EnumWikipedia wikipedia,
      Page page, String title, String section,
      String contents, boolean forceWatch) throws APIException {
    if (page == null) {
      throw new APIException("Page is null");
    }
    if (title == null) {
      throw new APIException("Title is null");
    }
    if (contents == null) {
      throw new APIException("Contents is null");
    }
    if (wikipedia.getConnection().getLgToken() == null) {
      throw new APIException("You must be logged in to update pages");
    }
    int attemptNumber = 0;
    QueryResult result = null;
    do {
      attemptNumber++;
      Map<String, String> properties = getProperties(ApiRequest.ACTION_EDIT, true);
      properties.put("assert", "user");
      if (page.getContentsTimestamp() != null) {
        properties.put("basetimestamp", page.getContentsTimestamp());
      }
      properties.put("bot", "");
      properties.put("minor", "");
      properties.put("section", section);
      if (page.getStartTimestamp() != null) {
        properties.put("starttimestamp", page.getStartTimestamp());
      }
      properties.put("summary", title);
      properties.put("text", contents);
      properties.put("title", page.getTitle());
      properties.put("token", wikipedia.getConnection().getEditToken());
      properties.put("watchlist", forceWatch ? "watch" : "nochange");
      checkTimeForEdit(wikipedia.getConnection().getUser(), page.getNamespace());
      try {
        boolean hasCaptcha = false;
        do {
          hasCaptcha = false;
          try {
            result = constructEdit(
                getRoot(wikipedia, properties, 1),
                "/api/edit");
          } catch (CaptchaException e) {
            String captchaAnswer = getCaptchaAnswer(wikipedia, e);
            if (captchaAnswer != null) {
              properties.put("captchaid", e.getId());
              properties.put("captchaword", captchaAnswer);
              hasCaptcha = true;
            } else {
              throw new APIException("CAPTCHA", e);
            }
          }
        } while (hasCaptcha);
      } catch (APIException e) {
        if (attemptNumber > 1) {
          throw e;
        }
        if (e.getQueryResult() == EnumQueryResult.BAD_TOKEN) {
          waitBeforeRetrying();
          log.warn("Retrieving tokens after a BAD_TOKEN answer");
          retrieveTokens(wikipedia);
        }
      } catch (JDOMParseException e) {
        log.error("Error updating page: " + e.getMessage());
        throw new APIException("Error parsing XML", e);
      }
    } while (result == null);
    return result;
  }
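updateSection() is private, so it is presumably reached through a public wrapper; a hedged sketch of such an internal call. Two differences from updatePage() above are worth noting: the `title` argument is reused as the edit summary, and the edit token is sent without a null check:

      QueryResult result = updateSection(
          wikipedia, page,
          "External links", // section title, becomes the edit summary
          "2",              // section index, passed as a string
          newSectionText,
          false);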

        // Excerpt: refresh the redirect status of a batch of pages from
        // the parsed response.
        updateRedirectStatus(
            wiki, pages,
            getRoot(wiki, properties, ApiRequest.MAX_ATTEMPTS));
      } catch (JDOMParseException e) {
        log.error("Error retrieving redirects", e);
        throw new APIException("Error parsing XML", e);
      }
    }
  }

        // Excerpt: build an error QueryResult from a failed login, reading
        // the "details" and "wait" (throttle delay) attributes.
        XPath xpaDetails = XPath.newInstance("./@details");
        return QueryResult.createErrorQuery(result, xpaDetails.valueOf(node), xpaWait.valueOf(node));
      }
    } catch (JDOMException e) {
      log.error("Error logging in", e);
      throw new APIException("Error parsing XML result", e);
    }
    return QueryResult.createErrorQuery(null, null, null);
  }

   * @throws APIException Error while parsing the XML result.
   */
  private boolean constructContents(Page page, Element root, String query)
      throws APIException {
    if (page == null) {
      throw new APIException("Page is null");
    }
    boolean redirect = false;
    try {
      XPath xpaPage = XPath.newInstance(query);
      Element node = (Element) xpaPage.selectSingleNode(root);
      if (node != null) {
        XPath xpaNamespace = XPath.newInstance("./@ns");
        page.setNamespace(xpaNamespace.valueOf(node));
        if (node.getAttribute("redirect") != null) {
          redirect = true;
          page.isRedirect(true);
        }
        if (node.getAttribute("missing") != null) {
          page.setExisting(Boolean.FALSE);
        }
        XPath xpaPageId = XPath.newInstance("./@pageid");
        page.setPageId(xpaPageId.valueOf(node));
        XPath xpaStartTimestamp = XPath.newInstance("./@starttimestamp");
        page.setStartTimestamp(xpaStartTimestamp.valueOf(node));
      }
      XPath xpa = XPath.newInstance(query + "/revisions/rev");
      node = (Element) xpa.selectSingleNode(root);
      if (node != null) {
        XPath xpaContents = XPath.newInstance(".");
        XPath xpaRevision = XPath.newInstance("./@revid");
        XPath xpaTimestamp = XPath.newInstance("./@timestamp");
        page.setContents(xpaContents.valueOf(node));
        page.setExisting(Boolean.TRUE);
        page.setRevisionId(xpaRevision.valueOf(node));
        page.setContentsTimestamp(xpaTimestamp.valueOf(node));
      }
      xpa = XPath.newInstance(query + "/protection/pr[@type=\"edit\"]");
      node = (Element) xpa.selectSingleNode(root);
      if (node != null) {
        XPath xpaLevel = XPath.newInstance("./@level");
        page.setEditProtectionLevel(xpaLevel.valueOf(node));
      }
    } catch (JDOMException e) {
      log.error("Error contents for page " + page.getTitle(), e);
      throw new APIException("Error parsing XML result", e);
    }
    return redirect;
  }
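For reference, an abridged response of the kind these XPath expressions walk, reconstructed from the expressions themselves (all attribute values are illustrative):

      <api>
        <query>
          <pages>
            <page pageid="12345" ns="0" title="Example"
                  starttimestamp="2011-01-01T00:00:00Z">
              <protection>
                <pr type="edit" level="autoconfirmed" />
              </protection>
              <revisions>
                <rev revid="67890" timestamp="2010-12-31T23:59:59Z">wikitext…</rev>
              </revisions>
            </page>
          </pages>
        </query>
      </api>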

   * @throws APIException
   */
  private void constructContents(List<Page> pages, Element root, String query)
      throws APIException {
    if (pages == null) {
      throw new APIException("Pages is null");
    }
    try {
      XPath xpaPage = XPath.newInstance(query);
      XPath xpaTitle = XPath.newInstance("./@title");
      XPath xpaRev = XPath.newInstance("./revisions/rev");
      XPath xpaContents = XPath.newInstance(".");
      List<?> resultPages = xpaPage.selectNodes(root);
      for (Object pageNode : resultPages) {
        Element currentPage = (Element) pageNode;
        String title = xpaTitle.valueOf(currentPage);
        Element currentRev = (Element) xpaRev.selectSingleNode(currentPage);
        String contents = xpaContents.valueOf(currentRev);

        // Copy the contents to every requested page with a matching title.
        for (Page page : pages) {
          if (Page.areSameTitle(page.getTitle(), title)) {
            page.setContents(contents);
          }
        }
      }
    } catch (JDOMException e) {
      log.error("Error contents for pages", e);
      throw new APIException("Error parsing XML result", e);
    }
  }
