/*
* Copyright (C) 2006 http://www.chaidb.org
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*/
package org.chaidb.db.index.btree.hbt;
import org.apache.log4j.Logger;
import org.chaidb.db.KernelContext;
import org.chaidb.db.exception.ChaiDBException;
import org.chaidb.db.index.btree.AbstractBTree;
import org.chaidb.db.index.btree.BTreeSpec;
import org.chaidb.db.index.btree.DataPage;
import org.chaidb.db.index.btree.bufmgr.PageBufferManager;
import org.chaidb.db.index.btree.bufmgr.PageNumber;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.NoSuchElementException;
/**
 * An iterator over duplicate nodes in a hyper B-tree that prefetches nodes in
 * "bulks" (batches spanning a fixed number of data pages) to reduce per-node
 * page lookups. The current bulk is held in {@link #bulkCache}; {@link #index}
 * walks through it, and a fresh bulk is fetched from {@link #nextBulk(int)}
 * whenever the cache is exhausted.
 *
 * <p>NOTE(review): thread-safety is not addressed anywhere in this class;
 * presumably each iterator instance is confined to a single thread — confirm
 * against callers.
 */
class HyperBTreeCachedIterator extends HyperBTreeIterator {
    private static final Logger logger = Logger.getLogger(HyperBTreeCachedIterator.class);

    /** Number of data pages fetched per bulk (value chosen per QA testing). */
    private static final int CACHED_PAGES = 2;

    /** Index of the current node within {@link #bulkCache}; -1 right after a refill. */
    private int index;

    /** Size of the current {@link #bulkCache}. */
    private int length;

    /** The current bulk of prefetched nodes (unmodifiable list of {@code NodeInfo}). */
    private List<NodeInfo> bulkCache;

    /** Position of the first node AFTER the cached bulk; null when there is none. */
    private NodePosition bulkNextNodePos;

    /**
     * Creates a cached iterator over the given B-tree.
     *
     * @param btree    the tree whose duplicate nodes are iterated
     * @param kContext kernel context passed through to the superclass
     */
    HyperBTreeCachedIterator(AbstractBTree btree, KernelContext kContext) {
        super(btree, kContext);
    }

    /**
     * Positions the iterator on {@code firstNode} and records where the bulk
     * after the superclass's initial node begins.
     *
     * @param firstNode the first node to iterate from
     */
    void initIterator(NodePosition firstNode) {
        super.initIterator(firstNode); //Default template
        // NOTE(review): assumes super.initIterator leaves _nextNode non-null
        // for a non-empty tree — confirm against HyperBTreeIterator.
        bulkNextNodePos = _nextNode.nodeNextPosition;
    }

    /**
     * Removes the current node. When the current node is the last one in the
     * cached bulk, the next bulk is fetched first so that the superclass's
     * {@code _nextNode} bookkeeping stays valid across the bulk boundary.
     */
    public void remove() {
        if (index == bulkCache.size() - 1) {
            _nextNode = null;
            if (bulkNextNodePos != null) {
                bulkCache = nextBulk(CACHED_PAGES);
                length = bulkCache.size();
                if (length > 0) {
                    index = -1;
                    _nextNode = bulkCache.get(0);
                }
            }
        }
        super.remove();
    }

    /**
     * Returns true if there is another node, refilling the cache from the next
     * bulk when the current one is exhausted. Note this method has a side
     * effect (cache refill) but is safe to call repeatedly: after a refill,
     * {@code index} is -1 and the fast path keeps returning true.
     */
    public boolean hasNext() {
        if (index < length - 1) {
            return true;
        } else {
            bulkCache = nextBulk(CACHED_PAGES);
            length = bulkCache.size();
            if (length > 0) {
                index = -1;
                return true;
            }
        }
        return false; //default template
    }

    /**
     * Returns the data of the next node in the current bulk.
     *
     * <p>NOTE(review): unlike a fully conforming {@link java.util.Iterator},
     * this method does NOT refill the cache itself; calling it at the end of a
     * bulk without an intervening {@link #hasNext()} throws even when more
     * bulks exist. Kept as-is since callers may rely on the hasNext protocol.
     *
     * @return the {@code data} field of the next cached node
     * @throws NoSuchElementException when the current bulk is exhausted
     */
    public Object next() throws NoSuchElementException {
        index++;
        if (index >= 0 && index <= length - 1) {
            if (index < length - 1) {
                _nextNode = bulkCache.get(index + 1);
            } else {
                _nextNode = null; // last element of the bulk; hasNext() will refill
            }
            _prevNode = _curNode;
            _curNode = bulkCache.get(index);
            return _curNode.data;
        } else {
            throw new NoSuchElementException();
        }
    }

    /**
     * Fetches the next bulk of duplicate nodes, reading up to {@code page}
     * data pages starting at {@link #bulkNextNodePos}. Sets {@code finished}
     * (inherited) when the duplicate chain ends. On an internal error the
     * partially filled bulk collected so far is returned (best-effort).
     *
     * @param page how many pages' worth of nodes to collect in the next bulk
     * @return an unmodifiable list of all data nodes found in those pages;
     *         empty when iteration is finished or there is no next position
     */
    List<NodeInfo> nextBulk(int page) {
        ArrayList<NodeInfo> cache = new ArrayList<NodeInfo>();
        if (finished) {
            return cache;
        }
        if (bulkNextNodePos == null) {
            return cache;
        }
        int nPageNo = bulkNextNodePos.getPageNo();
        int id = btree.getBtreeId();
        final PageBufferManager buffer = btree.getBuffer();
        final BTreeSpec btreeSpec = btree.getBTreeSpec();
        PageNumber dupPageNumber = new PageNumber(nPageNo);
        try {
            DataPage dupDataPage = new DataPage(id, dupPageNumber, btreeSpec, buffer);
            int pageCount = 0;
            while (pageCount < page && !finished) {
                NodeInfo node = new NodeInfo();
                node.nodePosition = bulkNextNodePos;
                int ret = getNode(dupDataPage, node);
                bulkNextNodePos = node.nodeNextPosition;
                if ((ret & DUP_FINISHED) == DUP_FINISHED) {
                    finished = true;
                    if ((ret & DUP_NEXT) == DUP_NEXT) {
                        break;
                    }
                }
                if ((ret & DUP_DATA) == DUP_DATA) {
                    cache.add(node);
                } else if ((ret & DUP_NEXT) == DUP_NEXT) {
                    // NOTE(review): bulkNextNodePos is assumed non-null here when
                    // DUP_NEXT is set without DUP_FINISHED — a null would NPE on
                    // getPageNo(); confirm getNode's contract.
                    if (bulkNextNodePos.getPageNo() == nPageNo) {
                        continue; // still on the same page; keep scanning it
                    } else {
                        // Crossing a page boundary; only count the page if it
                        // actually contributed data nodes.
                        if (cache.size() > 0) {
                            pageCount++;
                        }
                        if (pageCount < page) {
                            nPageNo = bulkNextNodePos.getPageNo();
                            buffer.releasePage(id, dupPageNumber, false);
                            dupPageNumber = new PageNumber(nPageNo);
                            dupDataPage = new DataPage(id, dupPageNumber, btreeSpec, buffer);
                        }
                    }
                } else {
                    logger.error("Failed to get next dup node.");
                }
            }
        } catch (ChaiDBException e) {
            // Best-effort: log with context and return whatever was collected.
            logger.error("Failed to fetch next bulk of duplicate nodes.", e);
        } finally {
            // Releases the most recently pinned page (earlier pages were
            // released inside the loop before being replaced).
            buffer.releasePage(id, dupPageNumber, false);
        }
        return Collections.unmodifiableList(cache);
    }
}