}
}
}
// Exercises a ConsistentHashFactory against a series of membership changes:
// builds a base CH, checks its distribution and that it is already balanced,
// then applies add/remove combinations from nodeChanges.
// NOTE(review): ns/no are forwarded to chf.create(hashFunction, no, ns, ...) —
// presumably numSegments/numOwners, but confirm against the factory's signature.
// (Method body continues past the end of this excerpt; the outer loop is not
// closed here.)
private void testConsistentHashModifications(ConsistentHashFactory<DefaultConsistentHash> chf, Hash hashFunction, List<Address> nodes, int ns, int no, Map<Address, Float> lfMap) {
// Base CH built from the full node list; lfMap carries per-node capacity
// factors and may be null (handled below when copying it).
DefaultConsistentHash baseCH = chf.create(hashFunction, no, ns, nodes, lfMap);
// The created CH must report exactly the capacity factors it was given.
assertEquals(lfMap, baseCH.getCapacityFactors());
checkDistribution(baseCH, lfMap, false);
// each element in the array is a pair of numbers: the first is the number of nodes to add
// the second is the number of nodes to remove (the index of the removed nodes are pseudo-random)
int[][] nodeChanges = {{1, 0}, {2, 0}, {0, 1}, {0, 2}, {1, 1}, {1, 2}, {2, 1}, {10, 0}, {0, 10}};
// check that the base CH is already balanced: with unchanged members,
// updateMembers and rebalance must return the very same instance (assertSame).
List<Address> baseMembers = baseCH.getMembers();
assertSame(baseCH, chf.updateMembers(baseCH, baseMembers, lfMap));
assertSame(baseCH, chf.rebalance(baseCH));
// starting point, so that we don't confuse nodes
// (counter for naming newly added nodes; consumed later in the loop body,
// beyond this excerpt)
int nodeIndex = baseMembers.size();
for (int i = 0; i < nodeChanges.length; i++) {
int nodesToAdd = nodeChanges[i][0];
int nodesToRemove = nodeChanges[i][1];
// Skip impossible scenarios: cannot remove more nodes than exist...
if (nodesToRemove > baseMembers.size())
break;
// ...and cannot remove every node without adding any replacement.
if (nodesToRemove == baseMembers.size() && nodesToAdd == 0)
break;
// Work on copies so the base CH inputs stay intact for later iterations.
List<Address> newMembers = new ArrayList<Address>(baseMembers);
HashMap<Address, Float> newCapacityFactors = lfMap != null ?
new HashMap<Address, Float>(lfMap) : null;
for (int k = 0; k < nodesToRemove; k++) {
// Pick a pseudo-random (but deterministic) victim by hashing the loop
// counter; Math.abs + modulo maps the hash into a valid list index.
int indexToRemove = Math.abs(baseCH.getHashFunction().hash(k) % newMembers.size());
if (newCapacityFactors != null) {
// Keep the capacity-factor map consistent with the shrunken member list.
newCapacityFactors.remove(newMembers.get(indexToRemove));
}
newMembers.remove(indexToRemove);
}