/*
* Copyright (C) 2006 http://www.chaidb.org
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*/
package org.chaidb.db.index.btree;
import org.apache.log4j.Logger;
import org.chaidb.db.DbEnvironment;
import org.chaidb.db.KernelContext;
import org.chaidb.db.exception.ChaiDBException;
import org.chaidb.db.exception.EncodingException;
import org.chaidb.db.exception.ErrorCode;
import org.chaidb.db.helper.ByteTool;
import org.chaidb.db.index.IDBIndex;
import org.chaidb.db.index.Key;
import org.chaidb.db.index.btree.bufmgr.PageBufferManager;
import org.chaidb.db.index.btree.bufmgr.PageNumber;
import org.chaidb.db.lock.LockManager;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.List;
public class SuperBTree extends BTree {
private static final Logger logger = Logger.getLogger(SuperBTree.class);
//THIS IS ONLY USED BY SUPERBTREE. In fact, the real keyType is stored in
//the BTreePage used for lookup/store/delete. In general, upper layers know
//the keyType. But defragmentation, which operates directly on the BTree,
//has no knowledge of the keyType, so it reads it from the metapage.
//Reminder: ONLY USED BY SUPERBTREE. For a normal BTree this variable is
//never set by upper layers, while for a SuperBTree it is set in the
//constructor or by setKeyTypes().
int keyTypes[];
/**
 * Creates an empty SuperBTree. setKeyTypes() must be called before the
 * multi-layer operations can be used.
 */
public SuperBTree() {
    btreeSpec.treeType = BTreeSpec.SUPER_BTREE;
}
/**
 * Returns the per-layer key types configured for this SuperBTree.
 *
 * @return a defensive copy of the key-type array, or null when the key
 *         types have not been set yet (no-arg constructor was used and
 *         setKeyTypes() has not been called)
 */
public int[] getKeyTypes() {
    // Return a copy so callers cannot mutate internal state: setKeyTypes()
    // already copies on the way in, so this instance owns the array.
    return keyTypes == null ? null : keyTypes.clone();
}
/**
 * @return the index type tag identifying a super (multi-layer) BTree
 */
public short getType() {
    return IDBIndex.SUPER_BTREE;
}
/**
 * Creates a SuperBTree with one layer per entry of keyTypes.
 *
 * @param keyTypes the key type of each layer, outermost layer first
 * @throws ChaiDBException if keyTypes has more entries than
 *                         BTreeSpec.KEYTYPE_SPACE_RESERVED allows
 */
public SuperBTree(int[] keyTypes) throws ChaiDBException {
    btreeSpec.treeType = BTreeSpec.SUPER_BTREE;
    setKeyTypes(keyTypes);
}
/**
 * Installs the key type for each layer of this SuperBTree. The number of
 * layers equals the length of the supplied array.
 *
 * @param keyTypes one key type per layer, outermost layer first
 * @throws ChaiDBException if more layers are requested than the metapage
 *                         can record (BTreeSpec.KEYTYPE_SPACE_RESERVED)
 */
public void setKeyTypes(int[] keyTypes) throws ChaiDBException {
    if (keyTypes.length > BTreeSpec.KEYTYPE_SPACE_RESERVED) {
        throw new ChaiDBException(ErrorCode.SUPERBTREE_HAS_TOO_MANY_LAYERS, "key's length is " + keyTypes.length + " is more than the maximum limit " + BTreeSpec.KEYTYPE_SPACE_RESERVED);
    }
    getBTreeSpec().setModified(true);
    // keep a private copy so later changes by the caller cannot affect us
    this.keyTypes = (int[]) keyTypes.clone();
    try {
        btreeSpec.setLayer((byte) keyTypes.length, null);
    } catch (ChaiDBException e) {
        // layer count could not be persisted; only logged, as before
        logger.error(e);
    }
}
/**
 * @return the number of layers recorded in this tree's spec
 */
public int getLayers() {
    return btreeSpec.getLayers();
}
/**
 * Get a page with specified pgNum. Here lock is got in the meanwhile.
 *
 * @param pgNum number of the page to fetch
 * @return the wrapped BTree page (its backing page may be null when the
 *         page does not exist yet -- callers check getPage() == null)
 * @throws ChaiDBException if the page cannot be read from the buffer
 */
private BTreePage getPage(PageNumber pgNum) throws ChaiDBException {
    //Note: we only lock valid and already existed page.
    return (new BTreePage(id, pgNum, btreeSpec, getBuffer()));
}
//if this is for the bottom layer, use default converter. Otherwise,
//always return PageNumber of root of next layer
Object convertValue(byte[] data, int level) throws ChaiDBException {
    // NOTE(review): bottom layer is detected via keyTypes.length here but
    // via getLayers() elsewhere; keyTypes is null when only the no-arg
    // constructor ran without setKeyTypes() -- confirm callers guarantee it.
    if (level == keyTypes.length) {
        // bottom layer: raw data, decoded by the converter when one is set
        if (converter != null) try {
            return converter.decodeFromByteArray(null, data);
        } catch (EncodingException ee) {
            logger.error(ee);
            // details -ranjeet
            String details = "Converter failed to decode : " + ee.toString() + ".";
            throw new ChaiDBException(ErrorCode.CONVERTER_DECODING_ERROR, details);
        }
        else return data;
    } else {
        // inner layer: data holds the page number of the next layer's root
        PageNumber pg = new PageNumber(ByteTool.bytesToInt(data, 0, btreeSpec.isMsbFirst()));
        pg.setTreeId(id);
        return pg;
    }
}
/**
 * Recursive lookup across the layers. A null key at a layer fans out to
 * every entry of that layer; at the bottom layer matching values are
 * appended to 'values', otherwise the matching next-layer roots are
 * descended into.
 *
 * @param keys     one key per layer (elements may be null)
 * @param level    1-based layer currently searched
 * @param root     root page of the current layer's tree
 * @param values   output collector for bottom-layer values
 * @param kContext caller's kernel context
 * @throws ChaiDBException if a page read or value conversion fails
 */
private void lookup(Key[] keys, int level, PageNumber root, ValueList values, KernelContext kContext) throws ChaiDBException {
    if (level == getLayers()) {
        // bottom layer: collect the values themselves
        if (keys[level - 1] == null) getAllInfo(root, values, level, kContext, false);
        else {
            byte[] data = super.lookup(keys[level - 1], root, kContext);
            if (data != null) values.add(convertValue(data, level));
        }
    } else {
        // inner layer: resolve the next-layer roots to descend into
        ArrayList roots = new ArrayList();
        if (keys[level - 1] == null) getAllInfo(root, roots, level, kContext, false);
        else {
            byte[] data = super.lookup(keys[level - 1], root, kContext);
            if (data != null) roots.add(convertValue(data, level));
        }
        for (int k = 0; k < roots.size(); k++) {
            lookup(keys, level + 1, (PageNumber) roots.get(k), values, kContext);
        }
    }
}
/**
 * Lookup the superBTree and get all nodes(NodeId) in the form of a List.
 *
 * @param keys          one key per layer; a null element selects every
 *                      entry of that layer (fan-out)
 * @param canNotBeNulls per-layer null constraints, see checkKeyPath()
 * @param kContext      caller's kernel context
 * @return null if this key path is not found, otherwise a List of values
 * @throws ChaiDBException only after three failed attempts
 */
public List lookup(Key[] keys, boolean[] canNotBeNulls, KernelContext kContext) throws ChaiDBException {
    kContext.checkLock(getBTreeName());
    ValueList values = null;
    // up to 3 attempts with a 100ms pause between them; only the final
    // failure is propagated to the caller
    for (int i = 0; i < 3; i++) {
        try {
            checkKeyPath(keys, canNotBeNulls);
            PageNumber root = new PageNumber(getTopRoot());
            values = new ValueList(false); // duplicates suppressed
            lookup(keys, 1, root, values, kContext);
        } catch (ChaiDBException e) {
            if (i == 2) {
                throw e;
            } else {
                try {
                    Thread.sleep(100);
                } catch (InterruptedException ie) {
                    ; // NOTE(review): interrupt swallowed; flag not restored
                }
                continue;
            }
        } finally {
            // always hand back pages held for protection during the scan
            if (PageBufferManager.PROTECT_PAGE) getBuffer().releaseHoldedPage(id);
        }
        break;
    }//end for
    return values.size() > 0 ? values : null;
}
/**
 * Recursive store across the layers. On inner layers the key maps to the
 * root page of the next layer's tree, which is created on demand; the
 * actual value is stored at the bottom layer.
 *
 * @param keys     one key per layer along the store path
 * @param value    encoded value stored at the bottom layer
 * @param mode     store mode (e.g. STORE_REPLACE)
 * @param level    1-based layer being written
 * @param root     root page of the current layer's tree (may be invalid)
 * @param kContext caller's kernel context
 * @return the new root of this layer if it changed, otherwise null
 * @throws ChaiDBException if a page operation fails
 */
private PageNumber store(Key[] keys, byte[] value, short mode, int level, PageNumber root, KernelContext kContext) throws ChaiDBException {
    PageNumber newRoot = null;
    if (level == getLayers()) {
        // bottom layer: plain BTree store
        newRoot = super.store(keys[level - 1], value, mode, root, kContext);
    } else {
        BTreePage rootPage = getPage(root);
        if (rootPage.getPage() == null) {
            // this layer does not exist yet: create its root leaf
            rootPage = BTreePage.newPage(btreeSpec, getBuffer(), kContext);
            rootPage.setLeaf();
            rootPage.setPrevPage(new PageNumber(id, 0, 0)); // root
            newRoot = rootPage.pageNumber;
        }
        BTreePage leafPage = rootPage.getLeaf(keys[level - 1], kContext, BTreePage.INSERT);
        //unfix rootPage
        getBuffer().releasePage(id, rootPage.pageNumber, false);
        //find the next layer root
        BTreeNode node = leafPage.search(keys[level - 1], kContext);
        if (node != null) root = (PageNumber) convertValue(node.getData(kContext), level);
        else root.setPageNumber(BTreeSpec.INVALID_PAGENO);
        PageNumber tmpPgNum = store(keys, value, mode, level + 1, root, kContext);
        // a non-null result means the child's root moved: rewrite our leaf entry
        boolean leafPageUpdated = (tmpPgNum != null);
        if (tmpPgNum != null)
            tmpPgNum = leafPage.insert(keys[level - 1], ByteTool.intToBytes(tmpPgNum.getPageNumber(), btreeSpec.isMsbFirst()), STORE_REPLACE, kContext);
        getBuffer().releasePage(id, leafPage.pageNumber, leafPageUpdated);
        //remember the new root
        if (tmpPgNum != null) newRoot = tmpPgNum;
    }
    return newRoot;
}
/**
 * Stores a value under the given key path, creating missing layers on
 * the way down.
 *
 * @param keys          one key per layer; must satisfy canNotBeNulls
 * @param canNotBeNulls per-layer null constraints, see checkKeyPath()
 * @param value         value to store; encoded via the converter if one
 *                      is set, otherwise must already be a byte[]
 * @param mode          store mode (e.g. STORE_REPLACE)
 * @param kContext      caller's kernel context
 * @throws ChaiDBException if the database is read-only, encoding fails,
 *                         or lock contention persists beyond the retries
 */
public void store(Key[] keys, boolean[] canNotBeNulls, Object value, short mode, KernelContext kContext) throws ChaiDBException {
    if (DbEnvironment.READ_ONLY) throw new ChaiDBException(ErrorCode.DATABASE_IS_READONLY);
    kContext.checkLock(getBTreeName());
    checkKeyPath(keys, canNotBeNulls);
    boolean succ = false;
    this.getBTreeSpec().setModified(true);
    byte[] data;
    try {
        if (converter != null) data = converter.encodeToByteArray(value);
        else data = (byte[]) value;
    } catch (EncodingException ee) {
        logger.error(ee);
        // details -ranjeet
        String details = "Converter failed in encoding " + ee.toString() + ".";
        throw new ChaiDBException(ErrorCode.CONVERTER_ENCODING_ERROR, details);
    }
    // retry only on lock contention; other errors surface immediately
    for (int retry = 0; retry < BTreeLock.MAX_RETRYS + 1 && !succ; retry++) {
        try {
            PageNumber root = new PageNumber(getTopRoot());
            root = store(keys, data, mode, 1, root, kContext);
            // a non-null root means the top root moved: persist it
            if (root != null) updateRootOnMetaPage(root, kContext);
            succ = true;
        } catch (ChaiDBException e) {
            if (e.getErrorCode() != ErrorCode.LOCK_NO_GRANT || retry == BTreeLock.MAX_RETRYS) throw e;
        } finally {
            doFinalJob();
        }
    }
}
/**
 * Recursively frees the subtree rooted at the given page. For an inner
 * layer every child tree referenced from this layer's leaves is deleted
 * first, then the layer's own tree is released.
 *
 * @param root     root page of the subtree to delete
 * @param level    1-based layer of 'root' within the SuperBTree
 * @param kContext caller's kernel context
 * @throws ChaiDBException if a page cannot be read or released
 */
private void deleteSubTreeRecursively(PageNumber root, int level, KernelContext kContext) throws ChaiDBException {
    if (level != getLayers()) {
        // inner layer: leaf data are the root pages of the next layer
        ArrayList childRoots = new ArrayList();
        getAllInfo(root, childRoots, level, kContext, false);
        for (int idx = 0; idx < childRoots.size(); idx++) {
            deleteSubTreeRecursively((PageNumber) childRoots.get(idx), level + 1, kContext);
        }
    }
    // finally drop this layer's own tree
    super.delSubTreeCore(root, true, kContext);
}
/**
 * Delete a key path recursively.
 *
 * @param keys     the keys; keys[0] up to the last non-null entry are all
 *                 non-null, all remaining entries are null. keys[endLevel-1]
 *                 itself may be null, in which case the whole subtree at
 *                 that level is dropped.
 * @param level    1-based layer currently processed
 * @param root     root page of the current layer's tree
 * @param endLevel the minimum of the layer count and the lowest level
 *                 whose key is null
 * @param kContext caller's kernel context
 * @return operation result; newRoot is non-null when this layer's root
 *         changed (INVALID_PAGENO meaning the parent entry must go away)
 * @throws ChaiDBException if the database is read-only or a page fails
 */
private OperResult delete(Key[] keys, int level, PageNumber root, int endLevel, KernelContext kContext) throws ChaiDBException {
    if (DbEnvironment.READ_ONLY) throw new ChaiDBException(ErrorCode.DATABASE_IS_READONLY);
    OperResult result;
    if (level == endLevel) {
        if (keys[level - 1] == null) {
            // null key at the end level: drop the entire subtree below
            deleteSubTreeRecursively(root, level, kContext);
            result = new OperResult(true, new PageNumber(BTreeSpec.INVALID_PAGENO));
        } else result = delete(keys[level - 1], root, kContext);
    } else {
        BTreePage rootPage = getPage(root);
        if (rootPage.getPage() != null) {
            BTreePage leafPage = rootPage.getLeaf(keys[level - 1], kContext, BTreePage.INSERT);
            //unfix rootPage
            getBuffer().releasePage(id, rootPage.pageNumber, false);
            //find the next layer root
            BTreeNode node = leafPage.search(keys[level - 1], kContext);
            boolean leafPageUpdated = false;
            if (node != null) {
                root = (PageNumber) convertValue(node.getData(kContext), level);
                result = delete(keys, level + 1, root, endLevel, kContext);
                leafPageUpdated = (result.success & result.newRoot != null);
                // child root changed: either remove our entry (child tree
                // is gone) or rewrite it with the child's new root number
                if (leafPageUpdated) if (result.newRoot.getPageInFile() == BTreeSpec.INVALID_PAGENO) {
                    result = leafPage.delete(keys[level - 1], kContext);
                } else {
                    result.newRoot = leafPage.insert(keys[level - 1], ByteTool.intToBytes(result.newRoot.getPageNumber(), btreeSpec.isMsbFirst()), STORE_REPLACE, kContext);
                }
            } else result = new OperResult(false, null);
            getBuffer().releasePage(id, leafPage.pageNumber, leafPageUpdated);
        } else result = new OperResult(false, null);
    }
    return result;
}
/**
 * Expands null key slots above endLevel into concrete keys (one delete
 * per expansion) and performs the actual delete once the path down to
 * endLevel is fully qualified.
 *
 * @param keys     working copy of the key path; null slots are filled in
 *                 destructively during the fan-out
 * @param level    1-based layer currently expanded
 * @param endLevel layer at which the actual delete is executed
 * @param root     root page of the current layer's tree
 * @param kContext caller's kernel context
 * @return true only if every expanded delete succeeded
 * @throws ChaiDBException if a lookup or delete fails
 */
private boolean fillKeyAndDelete(Key keys[], int level, int endLevel, PageNumber root, KernelContext kContext) throws ChaiDBException {
    if (level < endLevel) {
        ArrayList roots = new ArrayList();
        ArrayList keysThisLayer = new ArrayList();
        if (keys[level - 1] == null) {
            // fan out over every entry of this layer: collect the
            // next-layer roots and the keys that lead to them
            getAllInfo(root, roots, level, kContext, false);
            getAllInfo(root, keysThisLayer, level, kContext, true);
        } else {
            byte[] data = super.lookup(keys[level - 1], root, kContext);
            if (data != null) {
                keysThisLayer.add(keys[level - 1]);
                roots.add(convertValue(data, level));
            }
        }
        if (roots.size() == 0) return false;
        boolean succ = true;
        for (int k = 0; k < roots.size(); k++) {
            keys[level - 1] = (Key) keysThisLayer.get(k);
            succ &= fillKeyAndDelete(keys, level + 1, endLevel, (PageNumber) roots.get(k), kContext);
        }
        return succ;
    } else {
        // path is fully qualified down to endLevel: do the delete
        return deleteWithQulifiedKey(keys, endLevel, kContext);
    }
}
// Precondition: keys[0..endLevel-1] hold the qualified path; everything
// after endLevel (exclusive) is null.
private boolean deleteWithQulifiedKey(Key keys[], int endLevel, KernelContext kContext) throws ChaiDBException {
    PageNumber topRoot = new PageNumber(getTopRoot());
    OperResult outcome = delete(keys, 1, topRoot, endLevel, kContext);
    // a non-null newRoot on success means the top root moved: persist it
    if (outcome.success && outcome.newRoot != null) {
        updateRootOnMetaPage(outcome.newRoot, kContext);
    }
    return outcome.success;
}
/**
 * Deletes the entry (or whole subtree) addressed by the given key path.
 * A null element below the deepest non-null key selects every entry of
 * that layer, so trailing nulls delete complete subtrees.
 *
 * @param keys          one key per layer; may contain nulls where allowed
 * @param canNotBeNulls per-layer null constraints (see checkKeyPath)
 * @param kContext      caller's kernel context
 * @return true only if the delete succeeded
 * @throws ChaiDBException on persistent lock contention or page errors
 */
public boolean delete(Key[] keys, boolean[] canNotBeNulls, KernelContext kContext) throws ChaiDBException {
    kContext.checkLock(getBTreeName());
    checkKeyPath(keys, canNotBeNulls);
    // endLevel = deepest layer whose key is non-null, plus one extra layer
    // (capped at keys.length) so the level below is visited too.
    int endLevel = keys.length;
    // Guard endLevel > 0: when every key is null (legal whenever
    // canNotBeNulls permits it) the unguarded loop underflowed and read
    // keys[-1]; with the guard, endLevel becomes 0 and the ++ below
    // yields 1, i.e. delete everything from the first layer down.
    while (endLevel > 0 && keys[endLevel - 1] == null) endLevel--;
    endLevel = (endLevel == keys.length) ? endLevel : ++endLevel;
    // work on clones so fillKeyAndDelete may overwrite null slots freely
    Key[] tmpKeys = new Key[keys.length];
    for (int i = 0; i < keys.length; i++)
        if (keys[i] != null) tmpKeys[i] = (Key) keys[i].clone();
    boolean succ = false;
    this.getBTreeSpec().setModified(true);
    // retry only on lock contention; other errors surface immediately
    for (int retry = 0; retry < BTreeLock.MAX_RETRYS + 1 && !succ; retry++) {
        try {
            PageNumber topRoot = new PageNumber(getTopRoot());
            //here explicitly lock metapage again, to implement tree-level lock.
            succ = this.fillKeyAndDelete(tmpKeys, 1, endLevel, topRoot, kContext);
        } catch (ChaiDBException e) {
            if (e.getErrorCode() != ErrorCode.LOCK_NO_GRANT || retry == BTreeLock.MAX_RETRYS) throw e;
        } finally {
            doFinalJob();
        }
    }
    return succ;
}
/**
 * Lookup btree to get values with the key in the scope constrained by
 * minKeys[] and maxKeys[]. There are some rules here:
 * (1) Either minKeys or maxKeys may be null, but they can't both be null
 * (2) Only the top consequent null key elements, (say, minKeys[0]~minKeys[n1], or
 * maxKeys[0]~maxKeys[n2], where n1 may be not equal to n2 are all not null)
 * are used.
 * (3) Each key[] constructs a path in the multiple layers. In Nth layer, minKey[N]
 * and maxKey[N] constrains the bound of values in this layer. If either is null,
 * collect values lower than (minKey[N] is null) or greater than (maxKey[N] is null)
 *
 * @param minKeys       per-layer lower bounds (elements may be null)
 * @param maxKeys       per-layer upper bounds (elements may be null)
 * @param canNotBeNulls per-layer null constraints -- NOTE(review): not
 *                      actually checked in this method; verify callers
 * @param includeMinKey true means nodes with the minKey are included
 *                      in the value list. Otherwise, they are excluded.
 * @param includeMaxKey true means nodes with the maxKey are included
 *                      in the value list. Otherwise, they are excluded.
 * @param kContext      caller's kernel context
 * @return the collected values, or null when nothing matched
 * @throws ChaiDBException only after three failed attempts
 */
public List rangeLookup(Key[] minKeys, Key[] maxKeys, boolean[] canNotBeNulls, boolean[] includeMinKey, boolean[] includeMaxKey, KernelContext kContext) throws ChaiDBException {
    kContext.checkLock(getBTreeName());
    boolean succ = false; // NOTE(review): never updated; leftover of old lock-retry loop
    ValueList values = new ValueList(false);
    // fixed 3 attempts with a 100ms pause between them
    for (int retry = 0; retry < 3; retry++) {
        try {
            PageNumber topRoot = new PageNumber(getTopRoot());
            rangeLookup(minKeys, maxKeys, 1, topRoot, includeMinKey, includeMaxKey, values, kContext);
        } catch (ChaiDBException e) {
            if (retry == 2) {
                throw e;
            } else {
                try {
                    Thread.sleep(100);
                } catch (InterruptedException ie) {
                    ; // NOTE(review): interrupt swallowed; flag not restored
                }
                continue;
            }
        } finally {
            doFinalJob();
        }
        break;
    }//end for
    return values.size() > 0 ? values : null;
}
/**
 * Recursive range scan across the layers. When both bounds of a layer
 * are null the whole layer fans out; otherwise the plain BTree range
 * lookup limits the scan before descending.
 *
 * @param minKeys       per-layer lower bounds (elements may be null)
 * @param maxKeys       per-layer upper bounds (elements may be null)
 * @param level         1-based layer currently scanned
 * @param root          root page of the current layer's tree
 * @param includeMinKey per-layer lower-bound inclusiveness
 * @param includeMaxKey per-layer upper-bound inclusiveness
 * @param values        output collector for bottom-layer values
 * @param kContext      caller's kernel context
 * @throws ChaiDBException if a page read or conversion fails
 */
private void rangeLookup(Key[] minKeys, Key[] maxKeys, int level, PageNumber root, boolean[] includeMinKey, boolean[] includeMaxKey, ValueList values, KernelContext kContext) throws ChaiDBException {
    if (level == getLayers()) {
        // bottom layer: collect the values themselves
        if (minKeys[level - 1] == null && maxKeys[level - 1] == null)
            getAllInfo(root, values, level, kContext, false);
        else {
            ArrayList list = new ArrayList();
            super.rangeLookup(minKeys[level - 1], maxKeys[level - 1], includeMinKey[level - 1], includeMaxKey[level - 1], root, list, kContext);
            if (list.size() > 0) for (int i = 0; i < list.size(); i++)
                values.add(convertValue((byte[]) list.get(i), level));
        }
    } else {
        // inner layer: resolve the next-layer roots inside the bounds
        ArrayList roots = new ArrayList();
        if (minKeys[level - 1] == null && maxKeys[level - 1] == null)
            getAllInfo(root, roots, level, kContext, false);
        else {
            super.rangeLookup(minKeys[level - 1], maxKeys[level - 1], includeMinKey[level - 1], includeMaxKey[level - 1], root, roots, kContext);
            for (int i = 0; i < roots.size(); i++)
                roots.set(i, convertValue((byte[]) roots.get(i), level));
        }
        for (int k = 0; k < roots.size(); k++) {
            rangeLookup(minKeys, maxKeys, level + 1, (PageNumber) roots.get(k), includeMinKey, includeMaxKey, values, kContext);
        }
    }
}
/**
 * Counts the bottom-layer values reachable from the given root by
 * walking the leaf chain of each layer.
 *
 * @param root  root page of the (sub)tree to count
 * @param layer 1-based layer of 'root'
 * @param kc    kernel context used to read node data
 * @return number of values below 'root'
 * @throws ChaiDBException if a page cannot be read
 */
private int countValuesOfTree(PageNumber root, int layer, KernelContext kc) throws ChaiDBException {
    BTreePage page = findLeftMostLeaf(root);
    int count = 0;
    // walk the leaf chain left to right until an invalid page number
    while (page.pageNumber.getPageInFile() > BTreeSpec.INVALID_PAGENO) {
        if (layer >= getLayers()) count += page.getCurrNodeNumbers();
        else {
            // inner layer: each node points at a next-layer subtree
            for (int i = 0; i < page.getCurrNodeNumbers(); i++) {
                PageNumber newRoot = new PageNumber(ByteTool.bytesToInt(page.getNode(i).getData(kc), 0, btreeSpec.isMsbFirst()));
                newRoot.setTreeId(id);
                count += countValuesOfTree(newRoot, layer + 1, kc);
            }
        }
        getBuffer().releasePage(id, page.pageNumber, false);
        page = new BTreePage(id, page.nextPage, btreeSpec, getBuffer());
    }
    return count;
}
/**
 * Counts the values stored in the bottom layer of this SuperBTree.
 *
 * @param kc kernel context used to read node data
 * @return total number of values in the tree
 * @throws ChaiDBException if a page cannot be read
 */
public int countValues(KernelContext kc) throws ChaiDBException {
    // start the recursive count from the tree's top root at layer 1
    return countValuesOfTree(new PageNumber(getTopRoot()), 1, kc);
}
/**
 * Enumerates all key paths in this SuperBTree; each element produced by
 * the enumeration is a Key[] with one key per layer.
 *
 * @param kContext caller's kernel context
 * @return the enumerator, or null if it could not be created in 3 tries
 */
public Enumeration keys(KernelContext kContext) {
    Enumeration keys = null;
    // up to 3 attempts with a 100ms pause; failures beyond that yield null
    for (int i = 0; i < 3; i++) {
        try {
            keys = new SuperBTreeEnumerator(this, kContext);
        } catch (ChaiDBException e) {
            if (i == 2) {
                // out of retries: log and signal failure with null
                logger.error(e);
                return null;
            } else {
                try {
                    Thread.sleep(100);
                } catch (InterruptedException ie) {
                    // restore the interrupt flag instead of swallowing it,
                    // so callers further up can observe the interruption
                    Thread.currentThread().interrupt();
                }
                continue;
            }
        }
        break;
    }//end for
    return keys;
}
/**
 * Creates an empty clone of this tree for copy operations. The internal
 * keyTypes array is handed to the constructor, which copies it (see
 * setKeyTypes), so no state is shared with the clone.
 *
 * @return a fresh SuperBTree with the same layer key types
 * @throws ChaiDBException if the key types are rejected
 */
protected AbstractBTree GenerateClonedBTree() throws ChaiDBException {
    return new SuperBTree(this.keyTypes);
}
//Overrides the super class method.
/**
 * Copies every key path / value pair of this tree into newBTree, holding
 * a read lock on the source and a write lock on the target throughout.
 *
 * @param newBTree target tree; must be a SuperBTree
 * @param kContext caller's kernel context
 * @throws ChaiDBException if locking, lookup or store fails
 */
protected void copyData(AbstractBTree newBTree, KernelContext kContext) throws ChaiDBException {
    SuperBTree nb = (SuperBTree) newBTree;
    Key[] keys = new Key[getLayers()];
    // constraints0: only the first layer key must be non-null (lookup);
    // constraints1: every layer key must be non-null (store).
    // NOTE(review): checkKeyPath reads canNotBeNulls[keys.length], one past
    // these arrays' length; it is only reached when all keys are null, which
    // constraints0[0]=true prevents -- fragile, confirm before changing.
    boolean[] constraints0 = new boolean[keys.length];
    boolean[] constraints1 = new boolean[keys.length];
    constraints0[0] = true;
    for (int i = 0; i < constraints1.length; i++)
        constraints1[i] = true;
    try {
        this.acquire(kContext, LockManager.LOCK_READ);
        nb.acquire(kContext, LockManager.LOCK_WRITE);
        Enumeration keysEnu = keys(kContext);
        while (keysEnu.hasMoreElements()) {
            keys = (Key[]) keysEnu.nextElement();
            ValueList nodes = (ValueList) lookup(keys, constraints0, kContext);
            for (int i = 0; nodes != null && i < nodes.size(); i++) {
                nb.store(keys, constraints1, nodes.get(i), IDBIndex.STORE_REPLACE, kContext);
            }
        }
    } finally {
        nb.release(kContext);
        this.release(kContext);
    }
}
/**
 * Validates the key path against the per-layer null constraints.
 *
 * @param keys          one key per layer; elements may be null
 * @param canNotBeNulls element i is true iff keys[i] must not be null;
 *                      the extra element at index keys.length is true iff
 *                      at least one key of the whole path must be non-null
 * @throws ChaiDBException if a constraint is violated
 */
private void checkKeyPath(Key[] keys, boolean[] canNotBeNulls) throws ChaiDBException {
    boolean sawNonNull = false;
    for (int i = 0; i < keys.length; i++) {
        if (keys[i] == null) {
            if (canNotBeNulls[i]) {
                throw new ChaiDBException(ErrorCode.INVALID_INPUT_PARAM, "Key" + "[" + i + "] can't be null.");
            }
        } else {
            sawNonNull = true;
        }
    }
    // every key is null: allowed only when the "all null" slot permits it
    if (!sawNonNull && canNotBeNulls[keys.length]) {
        throw new ChaiDBException(ErrorCode.INVALID_INPUT_PARAM, "Some Key is null while all keys need be not Null!");
    }
}
/**
 * Collects all keys or all (converted) data values of one layer's tree
 * by walking its leaf chain left to right.
 *
 * @param root     root page of the layer's tree
 * @param values   output container
 * @param level    1-based layer of the tree (drives data conversion)
 * @param kContext caller's kernel context
 * @param isKey    true to collect keys, false to collect data values
 * @throws ChaiDBException if a page cannot be read
 */
private void getAllInfo(PageNumber root, ArrayList values, int level, KernelContext kContext, boolean isKey) throws ChaiDBException {
    BTreePage leafPage = findLeftMostLeaf(root);
    if (leafPage.getPage() == null) return;
    while (true) {
        collectInfoOfAPage(leafPage, values, level, kContext, isKey);
        getBuffer().releasePage(id, leafPage.pageNumber, false);
        // stop at the end of the leaf chain, otherwise follow nextPage
        if (leafPage.nextPage.getPageInFile() == BTreeSpec.INVALID_PAGENO) return;
        else {
            leafPage = new BTreePage(id, leafPage.nextPage, btreeSpec, getBuffer());
        }
    }
}
/**
 * Appends the content of one leaf page to 'values'.
 *
 * @param leafPage leaf page to scan
 * @param values   output container
 * @param level    1-based layer of the page (drives data conversion)
 * @param kContext caller's kernel context
 * @param isKey    true to collect keys, false to collect converted data
 * @throws ChaiDBException if node data cannot be read or converted
 */
private void collectInfoOfAPage(BTreePage leafPage, ArrayList values, int level, KernelContext kContext, boolean isKey) throws ChaiDBException {
    final int nodeCount = leafPage.getCurrNodeNumbers();
    for (int slot = 0; slot < nodeCount; slot++) {
        BTreeNode current = leafPage.getNode(slot);
        // keys are taken as-is; data goes through the layer conversion
        Object entry = isKey ? current.getKey() : convertValue(current.getData(kContext), level);
        values.add(entry);
    }
}
private class ValueList extends ArrayList {
Hashtable index = new Hashtable();
boolean permitDuplication;
ValueList(boolean permitDuplication) {
this.permitDuplication = permitDuplication;
}
private boolean canAddIn(Object obj) {
if (permitDuplication || !index.containsKey(obj)) return true;
else return false;
}
public boolean add(Object obj) {
if (obj == null) return false;
if (canAddIn(obj)) {
index.put(obj, obj);
super.add(obj);
return true;
} else return false;
}
}
}