*/
public FieldMatch determineFieldMatches(final String inExplanationText, final String searchText)
{
    FieldMatch matchedKeywords = new FieldMatch();
    if (fieldsToAnalyze.isEmpty())
    {
        return matchedKeywords;
    }
    log.debug("Explanation: " + inExplanationText);

    // Strip the boost values (e.g. "^2.0") to simplify the field/term matching below.
    // The dot is escaped so it matches a literal decimal point.
    Pattern boostPattern = Pattern.compile("\\^[0-9]+\\.[0-9]+");
    String explanationText = boostPattern.matcher(inExplanationText).replaceAll("");

    // Convert the search keywords to their analyzed form, stored as a map of
    // <tokenizedForm, originalKeyword> so analyzed tokens can be mapped back to the user's input.
    Map<String, String> tokenizedKeywords = tokenizeKeywords(searchText);

    // Scan the explanation for "weight(fieldName:token" entries for each field of interest.
    for (String fieldName : fieldsToAnalyze)
    {
        Pattern weightPattern = Pattern.compile("\\sweight\\(" + fieldName + ":(\\w+)\\s",
                Pattern.CASE_INSENSITIVE | Pattern.MULTILINE);
        Matcher m = weightPattern.matcher(explanationText);
        while (m.find())
        {
            // Defensive: skip analyzed tokens that don't map back to an original keyword.
            String originalKeyword = tokenizedKeywords.get(m.group(1));
            if (originalKeyword != null)
            {
                matchedKeywords.addMatch(fieldName, originalKeyword);
            }
        }
    }
    return matchedKeywords;
}
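
// Example usage (sketch, not from this codebase): the explanation text is typically the
// string form of a Lucene Explanation, e.g. obtained from an IndexSearcher:
//
//     Explanation explanation = searcher.explain(query, docId);
//     FieldMatch matches = determineFieldMatches(explanation.toString(), "john smith");
//
// "searcher", "query", "docId", and the sample search text are assumptions for illustration.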
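
/**
 * Sketch only: the real tokenizeKeywords() called above is not part of this excerpt. This
 * illustrates one plausible implementation using Lucene's Analyzer/TokenStream API (3.1+),
 * assuming the class holds an "analyzer" field and that a single placeholder field name
 * ("content") is representative of how the search text was analyzed. Requires
 * java.io.StringReader, java.io.IOException, java.util.HashMap,
 * org.apache.lucene.analysis.TokenStream and
 * org.apache.lucene.analysis.tokenattributes.CharTermAttribute.
 *
 * @param searchText
 *            the raw search text entered by the user.
 * @return map of analyzed token to the original keyword it came from.
 * @throws IOException
 *             on analysis errors.
 */
private Map<String, String> tokenizeKeywordsSketch(final String searchText) throws IOException
{
    Map<String, String> tokenized = new HashMap<String, String>();
    for (String keyword : searchText.split("\\s+"))
    {
        // Run each raw keyword through the analyzer; every analyzed token maps back to it.
        TokenStream stream = analyzer.tokenStream("content", new StringReader(keyword));
        CharTermAttribute term = stream.addAttribute(CharTermAttribute.class);
        stream.reset();
        while (stream.incrementToken())
        {
            tokenized.put(term.toString(), keyword);
        }
        stream.end();
        stream.close();
    }
    return tokenized;
}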