// validation; unlike the DateHistogramFacetParser, the distinctField and the interval are also required
if (keyField == null) {
throw new FacetPhaseExecutionException(facetName, "key field is required to be set for distinct histogram facet, either using [field] or using [key_field]");
}
FieldMapper keyMapper = context.smartNameFieldMapper(keyField);
if (keyMapper == null) {
throw new FacetPhaseExecutionException(facetName, "(key) field [" + keyField + "] not found");
} else if (!keyMapper.fieldDataType().getType().equals("long")) {
// date fields are backed by long field data, so any other field data type cannot be a date field
throw new FacetPhaseExecutionException(facetName, "(key) field [" + keyField + "] is not of type date");
}
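// the distinct field only needs a mapping here; its field data type determines which executor is created below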
if (distinctField == null) {
throw new FacetPhaseExecutionException(facetName, "distinct field is required to be set for distinct histogram facet, either using [value_field] or using [distinctField]");
}
FieldMapper distinctFieldMapper = context.smartNameFieldMapper(distinctField);
if (distinctFieldMapper == null) {
throw new FacetPhaseExecutionException(facetName, "no mapping found for " + distinctField);
}
if (!intervalSet) {
throw new FacetPhaseExecutionException(facetName, "[interval] is required to be set for distinct histogram facet");
}
// this is specific to the "Distinct" DateHistogram. Use a MutableDateTime to take care of the interval and rounding.
// we set the rounding after we set the zone, for it to take effect
if (sInterval != null) {
int index = sInterval.indexOf(':');
if (index != -1) {
// set with rounding
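// interval format is "<dateField>:<roundingType>", e.g. "year:floor" ("floor" assumed to be a registered rounding type)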
DateFieldParser fieldParser = dateFieldParsers.get(sInterval.substring(0, index));
if (fieldParser == null) {
throw new FacetPhaseExecutionException(facetName, "failed to parse interval [" + sInterval + "] with custom rounding using built in intervals (year/month/...)");
}
DateTimeField field = fieldParser.parse(dateTime.getChronology());
int rounding = this.rounding.get(sInterval.substring(index + 1));
if (rounding == -1) {
throw new FacetPhaseExecutionException(facetName, "failed to parse interval [" + sInterval + "], rounding type [" + (sInterval.substring(index + 1)) + "] not found");
}
dateTime.setRounding(field, rounding);
} else {
DateFieldParser fieldParser = dateFieldParsers.get(sInterval);
if (fieldParser != null) {
DateTimeField field = fieldParser.parse(dateTime.getChronology());
dateTime.setRounding(field, MutableDateTime.ROUND_FLOOR);
} else {
// time interval
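// e.g. "1h" or "30m", parsed by TimeValue into a fixed interval in milliseconds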
try {
interval = TimeValue.parseTimeValue(sInterval, null).millis();
} catch (Exception e) {
throw new FacetPhaseExecutionException(facetName, "failed to parse interval [" + sInterval + "], tried both as built in intervals (year/month/...) and as a time format");
}
}
}
}
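// pick the executor based on the distinct field's field data type: bytes/ordinal field data for strings, numeric field data for longs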
if (distinctFieldMapper.fieldDataType().getType().equals("string")) {
PagedBytesIndexFieldData distinctFieldData = context.fieldData().getForField(distinctFieldMapper);
PackedArrayIndexFieldData keyIndexFieldData = context.fieldData().getForField(keyMapper);
return new StringDistinctDateHistogramFacetExecutor(keyIndexFieldData, distinctFieldData, dateTime, interval, comparatorType, context.cacheRecycler());
} else if (distinctFieldMapper.fieldDataType().getType().equals("long")) {
IndexNumericFieldData distinctFieldData = context.fieldData().getForField(distinctFieldMapper);
IndexNumericFieldData keyIndexFieldData = context.fieldData().getForField(keyMapper);
return new LongDistinctDateHistogramFacetExecutor(keyIndexFieldData, distinctFieldData, dateTime, interval, comparatorType, context.cacheRecycler());
} else {
throw new FacetPhaseExecutionException(facetName, "distinct field [" + distinctField + "] is not of type string or long");