* then for each newly added mapping, the default target is set as provided.
*/
public void initTransformer(Double defaultTarget) {
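// Only free-string scales need this initialization: their value domain
// is derived from the values actually encountered in the value map.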
if (scale instanceof FreeStringScale) {
FreeStringScale freeScale = (FreeStringScale) scale;
// Collect all distinct values that actually occur in the value map
OrdinalTransformer t = (OrdinalTransformer) transformer;
Map<String, TargetValueObject> map = t.getMapping();
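// mapping: known string value -> target value used by the ordinal transformer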
HashSet<String> allValues = new HashSet<String>();
for (Values values: valueMap.values()) {
for (Value v : values.getList()) {
FreeStringValue text = (FreeStringValue) v;
if (!text.toString().equals("")) {
for (String s: map.keySet()) {
// If the value differs from a mapping key only by case,
// normalize it to the casing predefined by the mapping
if (text.getValue().equalsIgnoreCase(s) && !text.getValue().equals(s)) {
text.setValue(s);
}
}
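// record the (possibly case-normalized) value as an encountered value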
allValues.add(text.getValue());
}
}
}
// Removing transformer values that no longer occur is disabled for now:
// why would we want to remove known mappings? They do no harm, because
// the lookup uses the actually encountered values (see below).
// HashSet<String> keysToRemove = new HashSet<String>();
// for (String s: map.keySet()) {
// if (!allValues.contains(s)) {
// keysToRemove.add(s);
// }
// }
// for (String s: keysToRemove) {
// map.remove(s);
// }
// Add all values that occur but are not in the mapping yet:
for (String s: allValues) {
if (!map.containsKey(s)) {
if (defaultTarget == null) {
map.put(s, new TargetValueObject());
} else {
map.put(s, new TargetValueObject(defaultTarget.doubleValue()));
}
}
}
// We also have to publish the known values to the scale,
// because it provides the reference lookup for iterating over
// and defining the transformation.
freeScale.setPossibleValues(allValues);
}
}