/**
 * Saves a new revision of a wiki page to Scalaris inside a transaction.
 *
 * Reads the currently stored {@code Page} for {@code title0}, verifies that
 * the caller's base revision ({@code prevRevId}) is still the current one and
 * that the user is allowed to edit, then prepares the write by extracting the
 * page's previous and new categories, templates and (optionally) back-links
 * via a {@code MyWikiModel} render pass.
 * Note: this view of the method is truncated — the actual write requests and
 * the final return are below the visible portion.
 *
 * @param connection   connection to Scalaris; if {@code null} the method fails fast
 * @param title0       (un-normalised) page title
 * @param newRev       the revision to store as the new current revision
 * @param prevRevId    revision id the client based its edit on (concurrency check)
 * @param restrictions new page restrictions, or {@code null} to keep the old ones
 * @param siteinfo     site info used to build the wiki model's namespace
 * @param username     user performing the edit (checked against edit restrictions)
 * @param nsObject     namespace used for title normalisation and key generation
 * @return a {@link SavePageResult} describing success/failure; on the visible
 *         failure paths it carries a message, a connection-failure flag, the
 *         pages involved and the elapsed time
 */
public static SavePageResult savePage(final Connection connection, final String title0,
final Revision newRev, final int prevRevId, final Map<String, String> restrictions,
final SiteInfo siteinfo, final String username, final MyNamespace nsObject) {
// wall-clock start for the elapsed-time figure in every SavePageResult below
long timeAtStart = System.currentTimeMillis();
final String statName = "saving " + title0;
// result state, filled in progressively; still null/default on early failures
Page oldPage = null;
Page newPage = null;
List<ShortRevision> newShortRevs = null;
BigInteger pageEdits = null;
// fail fast when there is no Scalaris connection (connectFailed = true)
if (connection == null) {
return new SavePageResult(false, "no connection to Scalaris", true,
oldPage, newPage, newShortRevs, pageEdits, statName,
System.currentTimeMillis() - timeAtStart);
}
// NOTE(review): 'title' is not referenced in the visible portion of this
// method — presumably used further down for key generation; verify.
String title = MyWikiModel.normalisePageTitle(title0, nsObject);
Transaction scalaris_tx = new Transaction(connection);
// check that the current version is still up-to-date:
// read old version first, then write
// -1 marks "page does not exist yet" (see the NotFoundException branch)
int oldRevId = -1;
String pageInfoKey = getPageKey(title0, nsObject);
// issue the read as a (single-element) request list on the transaction
Transaction.RequestList requests = new Transaction.RequestList();
requests.addRead(pageInfoKey);
Transaction.ResultList results;
try {
results = scalaris_tx.req_list(requests);
} catch (Exception e) {
// connection problems are retryable for the caller, hence the instanceof flag
return new SavePageResult(false,
"unknown exception getting page info (" + pageInfoKey
+ ") from Scalaris: " + e.getMessage(),
e instanceof ConnectionException, oldPage, newPage,
newShortRevs, pageEdits, statName,
System.currentTimeMillis() - timeAtStart);
}
try {
// decode the stored page and build the new page from it, keeping id,
// redirect flag and a (mutable) copy of the old restrictions
oldPage = results.processReadAt(0).jsonValue(Page.class);
newPage = new Page(oldPage.getTitle(),
oldPage.getId(), oldPage.isRedirect(),
new LinkedHashMap<String, String>(
oldPage.getRestrictions()), newRev);
oldRevId = oldPage.getCurRev().getId();
} catch (NotFoundException e) {
// this is ok and means that the page did not exist yet
newPage = new Page();
newPage.setTitle(title0);
newPage.setCurRev(newRev);
} catch (Exception e) {
return new SavePageResult(false, "unknown exception reading \""
+ pageInfoKey + "\" from Scalaris: " + e.getMessage(),
e instanceof ConnectionException, oldPage, newPage,
newShortRevs, pageEdits, statName,
System.currentTimeMillis() - timeAtStart);
}
// permission check against the page's edit restrictions
if (!newPage.checkEditAllowed(username)) {
return new SavePageResult(false,
"operation not allowed: edit is restricted", false,
oldPage, newPage, newShortRevs, pageEdits, statName,
System.currentTimeMillis() - timeAtStart);
}
// optimistic-concurrency check: the client must have edited the revision
// that is still current (for a new page both sides should be -1)
if (prevRevId != oldRevId) {
return new SavePageResult(false, "curRev(" + oldRevId
+ ") != oldRev(" + prevRevId + ")", false, oldPage,
newPage, newShortRevs, pageEdits, statName,
System.currentTimeMillis() - timeAtStart);
}
// write:
// get previous categories, templates and backlinks:
final MyWikiModel wikiModel = new MyWikiModel("", "", new MyNamespace(siteinfo));
wikiModel.setPageName(title0);
Set<String> oldCats;
Set<String> oldTpls;
Set<String> oldLnks;
// only render the old text if the page existed and has a current revision
// (oldRevId != -1 already implies oldPage was read successfully)
if (oldRevId != -1 && oldPage != null && oldPage.getCurRev() != null) {
// get a list of previous categories and templates:
wikiModel.setUp();
final long timeAtRenderStart = System.currentTimeMillis();
wikiModel.render(null, oldPage.getCurRev().unpackedText());
// NOTE(review): subtracting the render duration from timeAtStart makes
// every later "now - timeAtStart" LARGER by that duration, i.e. render
// time ends up counted twice in the reported elapsed time. If the intent
// was to EXCLUDE render time from the statistic, this should likely be
// "timeAtStart += ..." — confirm against the stats consumer.
timeAtStart -= (System.currentTimeMillis() - timeAtRenderStart);
// note: no need to normalise the pages, we will do so during the write/read key generation
oldCats = wikiModel.getCategories().keySet();
oldTpls = wikiModel.getTemplates();
if (Options.WIKI_USE_BACKLINKS) {
oldLnks = wikiModel.getLinks();
} else {
// use empty link lists to turn back-links off
oldLnks = new HashSet<String>();
}
wikiModel.tearDown();
} else {
// page is new (or has no current revision): nothing to diff against
oldCats = new HashSet<String>();
oldTpls = new HashSet<String>();
oldLnks = new HashSet<String>();
}
// get new categories and templates
wikiModel.setUp();
// NOTE(review): this do/while(false) runs exactly once — presumably a
// leftover from a retry loop; the braces now only scope timeAtRenderStart.
do {
final long timeAtRenderStart = System.currentTimeMillis();
wikiModel.render(null, newRev.unpackedText());
// NOTE(review): same double-counting concern as the render above.
timeAtStart -= (System.currentTimeMillis() - timeAtRenderStart);
} while (false);
// a redirect target in the new text marks the new page as a redirect
if (wikiModel.getRedirectLink() != null) {
newPage.setRedirect(true);
}
// null restrictions mean "keep the restrictions copied from the old page"
if (restrictions != null) {
newPage.setRestrictions(restrictions);
}
// note: do not tear down the wiki model - the following statements
// still need it and it will be removed at the end of the method anyway
// note: no need to normalise the pages, we will do so during the write/read key generation