//afterMethod = false;
//System.out.println("insertRelated begin");
Hive hive = getHive();
//System.out.println(hive.toString());
//insertKeys(getHive());
Directory d = getDirectory();
// For each primary key: register it with the hive directory, link it to the
// resource, attach one secondary key per secondary index (name + num), then
// verify both keys round-trip through the directory.
for (String primaryIndexKey : getPrimaryIndexOrResourceKeys()) {
hive.directory().insertPrimaryIndexKey(primaryIndexKey);
// NOTE(review): the resource id and the primary index key are the same value
// here — presumably a 1:1 resource/primary-key setup; confirm against the fixture.
d.insertResourceId(resource, primaryIndexKey, primaryIndexKey);
// Payload for the (disabled) batch writer below: one String key under
// nameIndex and one numeric key under numIndex.
Map<SecondaryIndex, Collection<Object>> secondaryIndexKeyMap = new Hashtable<SecondaryIndex, Collection<Object>>();
secondaryIndexKeyMap.put(nameIndex, Arrays.asList(new Object[]{
secondaryKeyString
}));
secondaryIndexKeyMap.put(numIndex, Arrays.asList(new Object[]{
secondaryKeyNum
}));
// TODO: for some reason the BatchIndexWriter won't find the tables when running through maven
//d.batch().insertSecondaryIndexKeys(secondaryIndexKeyMap, primaryIndexKey);
// Manual fallback for the disabled batch call above: insert each secondary
// index key one at a time through the hive directory.
for (SecondaryIndex secondaryIndex : secondaryIndexKeyMap.keySet()) {
for (Object secondaryIndexKeyNum : secondaryIndexKeyMap.get(secondaryIndex)) {
hive.directory().insertSecondaryIndexKey(secondaryIndex.getResource().getName(), secondaryIndex.getName(), secondaryIndexKeyNum, primaryIndexKey);
}
}
// NOTE(review): this repeats the numIndex insert already performed by the loop
// above for the same (key, primaryIndexKey) pair — looks like a leftover
// duplicate. The size()==1 assertion below still passes only if the directory
// de-duplicates (or ignores) repeated inserts; confirm and remove if redundant.
hive.directory().insertSecondaryIndexKey(numIndex.getResource().getName(), numIndex.getName(), secondaryKeyNum, primaryIndexKey);
// Verify exactly one key per secondary index, matching what was inserted.
assertEquals(1, d.getSecondaryIndexKeysOfResourceId(nameIndex, primaryIndexKey).size());
assertEquals(secondaryKeyString, Atom.getFirst(d.getSecondaryIndexKeysOfResourceId(nameIndex, primaryIndexKey)));
assertEquals(1,
d.getSecondaryIndexKeysOfResourceId(numIndex, primaryIndexKey).size());
assertEquals(secondaryKeyNum,
Atom.getFirst(d.getSecondaryIndexKeysOfResourceId(numIndex, primaryIndexKey)));
}
}