List<FeatureCollection<? extends FeatureType, ? extends Feature>> results = new ArrayList<>();
List<CountExecutor> totalCountExecutors = new ArrayList<>();
try {
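// process each query in the request, stopping early once the global maxFeatures limit is reached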
for (int i = 0; (i < queries.size()) && (count < maxFeatures); i++) {
Query query = queries.get(i);
try {
//alias sanity check
if (!query.getAliases().isEmpty()) {
if (query.getAliases().size() != query.getTypeNames().size()) {
throw new WFSException(request, String.format("Query specifies %d type names and %d " +
"aliases, must be equal", query.getTypeNames().size(), query.getAliases().size()));
}
}
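// resolve each requested type name to its feature type metadata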
List<FeatureTypeInfo> metas = new ArrayList<>();
for (QName typeName : query.getTypeNames()) {
metas.add(featureTypeInfo(typeName, request));
}
//first is the primary feature type
FeatureTypeInfo meta = metas.get(0);
// parse the requested property names and distribute among requested types
List<List<String>> reqPropertyNames = parsePropertyNames(query, metas);
NamespaceSupport ns = getNamespaceSupport();
List<List<PropertyName>> propNames = new ArrayList<>();
List<List<PropertyName>> allPropNames = new ArrayList<>();
for (int j = 0; j < metas.size(); j++) {
List<String> propertyNames = reqPropertyNames.get(j);
List<PropertyName> metaPropNames = null;
List<PropertyName> metaAllPropNames = null;
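// if specific properties were requested, validate each one against the feature type before querying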
if (!propertyNames.isEmpty()){
metaPropNames = new ArrayList<>();
for (String requestedName : propertyNames) {
PropertyName propName = createPropertyName(requestedName, ns);
if ( propName.evaluate(meta.getFeatureType()) == null) {
String mesg = "Requested property: " + propName + " is not available for "
+ meta.getPrefixedName() + ". ";
if (meta.getFeatureType() instanceof SimpleFeatureType) {
List<AttributeTypeInfo> atts = meta.attributes();
List<String> attNames = new ArrayList<>(atts.size());
for (AttributeTypeInfo att : atts) {
attNames.add(att.getName());
}
mesg += "The possible propertyName values are: " + attNames;
}
throw new WFSException(request, mesg, "InvalidParameterValue");
}
metaPropNames.add(propName);
}
// if we need to force feature bounds computation, we have to load
// all of the geometries, but we'll have to remove them in the
// returned feature type
if(wfs.isFeatureBounding()) {
metaAllPropNames = addGeometryProperties(meta, metaPropNames);
} else {
metaAllPropNames = metaPropNames;
}
// we must also include any mandatory properties (i.e. those with minOccurs > 0),
// even if they were not requested
// only do this for simple features; mandatory properties of complex features are handled by app-schema
if (meta.getFeatureType() instanceof SimpleFeatureType) {
metaAllPropNames =
DataUtilities.addMandatoryProperties((SimpleFeatureType) meta.getFeatureType(), metaAllPropNames);
metaPropNames =
DataUtilities.addMandatoryProperties((SimpleFeatureType) meta.getFeatureType(), metaPropNames);
}
//for complex features, mandatory properties need to be handled by datastore.
}
allPropNames.add(metaAllPropNames);
propNames.add(metaPropNames);
}
//set up joins (if specified)
List<Join> joins = null;
String primaryAlias = null;
QName primaryTypeName = query.getTypeNames().get(0);
FeatureTypeInfo primaryMeta = metas.get(0);
//make sure filters are sane
//
// Validation of filters on non-simple feature types is not yet supported.
// FIXME: Support validation of filters on non-simple feature types:
// need to consider xpath properties and how to configure namespace prefixes in
// GeoTools app-schema FeaturePropertyAccessorFactory.
Filter filter = query.getFilter();
if (filter == null && metas.size() > 1) {
throw new WFSException(request, "Join query must specify a filter");
}
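// validate the filter and, for join queries, split it into join filters and a primary filter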
if (filter != null) {
if (meta.getFeatureType() instanceof SimpleFeatureType) {
if (metas.size() > 1) {
//ensure that the filter is allowable
if (!isValidJoinFilter(filter)) {
throw new WFSException(request,
"Unable to preform join with specified filter: " + filter);
}
// join, need to separate the joining filter from other filters
JoinExtractingVisitor extractor =
new JoinExtractingVisitor(metas, query.getAliases());
filter.accept(extractor, null);
primaryAlias = extractor.getPrimaryAlias();
primaryMeta = extractor.getPrimaryFeatureType();
primaryTypeName = new QName(primaryMeta.getNamespace().getURI(),
primaryMeta.getNativeName());
joins = extractor.getJoins();
if (joins.size() != metas.size()-1) {
throw new WFSException(request, String.format("Query specified %d types but %d " +
"join filters were found", metas.size(), extractor.getJoins().size()));
}
//validate the filter for each join
for (int j = 1; j < metas.size(); j++) {
Join join = joins.get(j-1);
if (join.getFilter() != null) {
validateFilter(join.getFilter(), query, metas.get(j), request);
}
}
filter = extractor.getPrimaryFilter();
if (filter != null) {
validateFilter(filter, query, meta, request);
}
}
else {
validateFilter(filter, query, meta, request);
}
} else {
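// complex feature type: filter validation is not yet supported (see FIXME above),
// just inject the namespace context into any BBOX filters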
BBOXNamespaceSettingVisitor filterVisitor = new BBOXNamespaceSettingVisitor(ns);
filter.accept(filterVisitor, null);
}
}
// validate sortby if present
List<SortBy> sortBy = query.getSortBy();
if (sortBy != null && !sortBy.isEmpty()
&& meta.getFeatureType() instanceof SimpleFeatureType) {
validateSortBy(sortBy, meta, request);
}
// load primary feature source
Hints hints = null;
if (joins != null) {
hints = new Hints(ResourcePool.JOINS, joins);
}
FeatureSource<? extends FeatureType, ? extends Feature> source =
primaryMeta.getFeatureSource(null, hints);
// handle local maximum
int queryMaxFeatures = maxFeatures - count;
int metaMaxFeatures = maxFeatures(metas);
if (metaMaxFeatures > 0 && metaMaxFeatures < queryMaxFeatures) {
queryMaxFeatures = metaMaxFeatures;
}
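// translate the WFS query into a GeoTools query, carrying over property names, paging,
// joins and any per-query view parameters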
Map<String, String> viewParam = viewParams != null ? viewParams.get(i) : null;
org.geotools.data.Query gtQuery = toDataQuery(query, filter, offset,
queryMaxFeatures, source, request, allPropNames.get(0), viewParam,
joins, primaryTypeName, primaryAlias);
LOGGER.fine("Query is " + query + "\n To gt2: " + gtQuery);
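// retrieve the features matching this query (data access may be deferred by the underlying store)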
FeatureCollection<? extends FeatureType, ? extends Feature> features = getFeatures(request, source, gtQuery);
// For complex features we need the targetCrs and version in the scenario where the
// top level feature does not contain a geometry (and therefore no CRS) but has a
// nested feature with a geometry as one of its properties. Furthermore, each nested
// feature may have a different CRS, hence we need to reproject each feature
// accordingly.
if (!(meta.getFeatureType() instanceof SimpleFeatureType)) {
features.getSchema().getUserData().put("targetCrs", query.getSrsName());
features.getSchema().getUserData().put("targetVersion", request.getVersion());
}
// feature collection size, we may need to calculate it
// optimization: WFS 1.0 does not require the count unless we have multiple query elements
// and we are asked to enforce a global limit on the results returned
boolean calculateSize = !(("1.0".equals(request.getVersion()) || "1.0.0".equals(request.getVersion())) &&
(queries.size() == 1 || maxFeatures == Integer.MAX_VALUE));
if (!calculateSize) {
//if offset was specified and we have more queries left in this request then we
// must calculate size in order to adjust the offset
calculateSize = offset > 0 && i < queries.size() - 1;
}
int size = 0;
if (calculateSize) {
size = features.size();
}
//update the count
count += size;
//if offset is present we need to check the size of this returned feature collection
// and adjust the offset for the next feature collection accordingly
if (offset > 0) {
if (size > 0) {
//features returned, offset can be set to zero
offset = 0;
}
else {
//no features returned, which might be because of the offset that was specified;
// check the size of the same query but with no offset
org.geotools.data.Query q2 = toDataQuery(query, filter, 0,
queryMaxFeatures, source, request, allPropNames.get(0),
viewParam, joins, primaryTypeName, primaryAlias);
//int size2 = getFeatures(request, source, q2).size();
int size2 = source.getCount(q2);
if (size2 > 0) {
//adjust the offset for the next query
offset = Math.max(0, offset - size2);
}
}
}
// collect queries required to return numberMatched/totalSize
// check maxFeatures and offset, if they are unset we can use the size we
// calculated above
isNumberMatchedSkipped = meta.getSkipNumberMatched();
if (!isNumberMatchedSkipped) {
if (calculateSize && queryMaxFeatures == Integer.MAX_VALUE && offset == 0) {
totalCountExecutors.add(new CountExecutor(size));
} else {
org.geotools.data.Query qTotal = toDataQuery(query, filter, 0,
Integer.MAX_VALUE, source, request, allPropNames.get(0), viewParam,
joins, primaryTypeName, primaryAlias);
totalCountExecutors.add(new CountExecutor(source, qTotal));
}
}
// we may need to shave off geometries that were loaded only to make bounds
// computation happy, but that were not actually requested
// TODO: support non-SimpleFeature geometry shaving
List<PropertyName> metaPropNames = propNames.get(0);
if (features.getSchema() instanceof SimpleFeatureType && metaPropNames != null
&& metaPropNames.size() < allPropNames.get(0).size()) {
String[] residualNames = new String[metaPropNames.size()];
int j = 0;
for (PropertyName propName : metaPropNames) {
residualNames[j++] = propName.getPropertyName();
}
SimpleFeatureType targetType = DataUtilities.createSubType((SimpleFeatureType) features.getSchema(), residualNames);
features = new FeatureBoundsFeatureCollection((SimpleFeatureCollection) features, targetType);
}
//JD: TODO reoptimize
// if ( i == request.getQuery().size() - 1 ) {
// //DJB: dont calculate feature count if you dont have to. The MaxFeatureReader will take care of the last iteration
// maxFeatures -= features.getCount();
// }
//GR: I don't know if the feature results should be added here for later
// encoding if it was a lock request. Maybe after ensuring the lock succeeded?
results.add(features);
}
catch(WFSException e) {
//intercept and set the locator to the query handle if one was set, or if the
// locator is simply set to GetFeature, which is the default
if (query.getHandle() != null &&
(e.getLocator() == null || "GetFeature".equalsIgnoreCase(e.getLocator()))) {
e.setLocator(query.getHandle());
}
throw e;
}
}
// total count represents the total count of the features matched; in cases where
// the client has limited the result set size, as an optimization we only calculate
// it if the following conditions hold:
// 1. the request is WFS 2.0
// 2. maxFeatures != Integer.MAX_VALUE
//TODO: we could actually add a third optimization: when the count of returned features
// is less than maxFeatures we don't have to calculate the total since it is the same as
// count, but this requires doing that check after the query loop, which requires a bit
// of code refactoring
// we need the total count only for WFS 2.0
if (!request.getVersion().startsWith("2")) {
totalCount = -1;
} else {
if (isNumberMatchedSkipped) {
totalCount = -1;
totalOffset = 0;
} else {
// optimization: if count < max features then total count == count
if(count < maxFeatures) {
totalCount = count;
} else {
// ok, in this case we're forced to run the queries to discover the actual total count
for (CountExecutor q : totalCountExecutors) {
int result = q.getCount();
// if the count is unknown for one, we don't know the total, period
if(result == -1) {
totalCount = -1;
break;
} else {
totalCount += result;
}
}
}
}
}
} catch (IOException | SchemaException e) {
throw new WFSException(request, "Error occurred getting features", e, request.getHandle());
}
//locking
String lockId = null;
if (request.isLockRequest()) {
LockFeatureRequest lockRequest = request.createLockRequest();
lockRequest.setExpiry(request.getExpiry());
lockRequest.setHandle(request.getHandle());
lockRequest.setLockActionAll();
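// create one lock per query, reusing the query filter, handle and primary type name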
for (int i = 0; i < queries.size(); i++) {
Query query = queries.get(i);
Lock lock = lockRequest.createLock();
lock.setFilter(query.getFilter());
lock.setHandle(query.getHandle());
//TODO: joins?
List<QName> typeNames = query.getTypeNames();
lock.setTypeName(typeNames.get(0));
lockRequest.addLock(lock);
}
LockFeature lockFeature = new LockFeature(wfs, catalog);