if (clickstreams != null && !clickstreams.isEmpty()) {
    // Keep only the .html requests whose URI does not match an excluded pattern
    List<ClickstreamRequest> cleanClickstreams = new ArrayList<ClickstreamRequest>();
    for (ClickstreamRequest clickstream : clickstreams) {
        String uri = clickstream.getRequestURI();
        if (uri.endsWith(".html")) {
            // Test whether the URI contains one of the excluded patterns
            boolean isExcluded = false;
            for (CharSequence pattern : excludedPatterns) {
                if (uri.contains(pattern)) {
                    isExcluded = true;
                    break;
                }
            }
            if (!isExcluded) {
                cleanClickstreams.add(clickstream);
            }
        }
    }
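    // cleanClickstreams now holds the visitor's .html page views in order.
    // The code below picks the URL "position" entries before the current request
    // (assuming "position" is a 1-based offset counted back from the end of the clickstream).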
    if (cleanClickstreams.size() == 1) {
        // A single clean page view: use it directly
        url = cleanClickstreams.get(0).getRequestURI();
    } else {
        // The last entry is the current request, so keep the URI that sits
        // "position" entries before the end of the list
        int countCleanClickstream = 1;
        for (ClickstreamRequest clickstream : cleanClickstreams) {
            if (countCleanClickstream == (cleanClickstreams.size() - position)) {
                url = clickstream.getRequestURI();
            }
            countCleanClickstream++;