}
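/**
 * Runs the configured integrity validation tests against the feature types
 * modified by this transaction, together with any additional types the
 * validation processor reports as dependencies.
 *
 * @param stores Map of typeRef (dataStoreId:typeName) to the feature source
 *        participating in the transaction
 * @param check  Envelope bounding the area affected by the transaction,
 *        used to limit the integrity tests
 */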
protected void integrityValidation(Map stores, Envelope check)
throws IOException, WfsTransactionException {
Data catalog = request.getWFS().getData();
ValidationProcessor validation = request.getValidationProcessor();
if( validation == null ) {
LOGGER.warning( "Validation Processor unavaialble" );
return;
}
LOGGER.finer( "Required to validate "+stores.size()+" typeRefs" );
LOGGER.finer( "within "+check );
// go through each modified typeName and ask the validation
// processor what we need to check
//
Set typeRefs = new HashSet();
for (Iterator i = stores.keySet().iterator(); i.hasNext();) {
String typeRef = (String) i.next();
typeRefs.add( typeRef );
Set dependencies = validation.getDependencies( typeRef );
LOGGER.finer( "typeRef "+typeRef+" requires "+dependencies);
typeRefs.addAll( dependencies );
}
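// typeRefs now contains every modified typeRef plus the additional
// typeRefs the validation processor reported as dependencies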
// Grab a FeatureSource for each typeRef we need to check,
// preferring the provided stores so validation runs against
// the contents of this transaction
//
Map sources = new HashMap();
for (Iterator i = typeRefs.iterator(); i.hasNext();) {
String typeRef = (String) i.next();
LOGGER.finer("Searching for required typeRef: " + typeRef );
if (stores.containsKey( typeRef )) {
LOGGER.finer(" found required typeRef: " + typeRef +" (it was already loaded)");
sources.put( typeRef, stores.get(typeRef));
} else {
// These sources will be using Transaction.AUTO_COMMIT;
// that is okay as they were not involved in our
// transaction...
LOGGER.finer(" could not find typeRef: " + typeRef +" (we will now load it)");
String[] split = typeRef.split(":");
String dataStoreId = split[0];
String typeName = split[1];
LOGGER.finer(" going to look for dataStoreId:"+dataStoreId+" and typeName:"+typeName );
// FeatureTypeInfo meta = catalog.getFeatureTypeInfo(typeName);
String uri = catalog.getDataStoreInfo( dataStoreId ).getNameSpace().getURI();
LOGGER.finer(" sorry I mean uri: " + uri +" and typeName:"+typeName );
FeatureTypeInfo meta = catalog.getFeatureTypeInfo( typeName, uri );
if( meta == null ){
throw new IOException( "Could not find typeRef:"+typeRef +" for validation processor" );
}
LOGGER.finer(" loaded required typeRef: " + typeRef );
sources.put( typeRef, meta.getFeatureSource());
}
}
LOGGER.finer( "Total of "+sources.size()+" featureSource marshalled for testing" );
final Map failed = new TreeMap();
ValidationResults results = new ValidationResults() {
String name;
String description;
public void setValidation(Validation validation) {
name = validation.getName();
description = validation.getDescription();
}
public void error(Feature feature, String message) {
LOGGER.warning(name + ": " + message + " (" + description + ")");
if (feature == null) {
failed.put("ALL", name + ": " + message + " (" + description + ")");
} else {
failed.put(feature.getID(), name + ": " + message + " (" + description + ")");
}
}
public void warning(Feature feature, String message) {
LOGGER.warning(name + ": " + message + " (" + description + ")");
}
};
try {
// validation should never be null here, but confDemo is giving grief,
// and I don't want transactions to fail just because the validation
// configuration is messed up. ch
LOGGER.finer("Running integrity tests using validation processor");
validation.runIntegrityTests(stores.keySet(), sources, check, results);
} catch (Exception badIdea) {
badIdea.printStackTrace();
// ValidationResults should have handled this; will redesign :-)
throw new DataSourceException("Validation Failed", badIdea);
}