// and configure them with our transaction
//
// (I am using element rather than transaction sub request
// to agree with the spec docs)
for (int i = 0; i < request.getSubRequestSize(); i++) {
SubTransactionRequest element = request.getSubRequest(i);
String typeRef = null;
String elementName = null;
FeatureTypeInfo meta = null;
if (element instanceof InsertRequest) {
// Option 1: Guess FeatureStore based on insert request
//
Feature feature = ((InsertRequest) element).getFeatures()
.features().next();
if (feature != null) {
String name = feature.getFeatureType().getTypeName();
URI uri = feature.getFeatureType().getNamespace();
LOGGER.fine("Locating FeatureSource uri:'"+uri+"' name:'"+name+"'");
meta = catalog.getFeatureTypeInfo(name, uri==null?null:uri.toString()); //change suggested by DZweirs
//HACK: The insert request does not get the correct typename,
//as we can no longer hack in the prefix since we are using the
//real featureType. So this is the only good place to do the
//look-up for the internal typename to use. We should probably
//rethink our use of prefixed internal typenames (cdf:bc_roads),
//and have our requests actually use a type uri and type name.
//Internally we can keep the prefixes, but if we do that then
//this will be less hacky and we'll also be able to read in xml
//for real, since the prefix should refer to the uri.
//
// JG:
// Translation: the Insert element does not have a clue about the prefix - this provides the clue
element.setTypeName( meta.getNameSpace().getPrefix()+":"+meta.getTypeName() );
}
else {
LOGGER.finer("Insert was empty - does not need a FeatuerSoruce");
continue; // insert is actually empty
}
}
else {
// Option 2: lookup based on elementName (assume prefix:typeName)
typeRef = null; // unknown at this time
elementName = element.getTypeName();
if( stores.containsKey( elementName )) {
LOGGER.finer("FeatureSource '"+elementName+"' already loaded." );
continue;
}
LOGGER.fine("Locating FeatureSource '"+elementName+"'...");
meta = catalog.getFeatureTypeInfo(elementName);
element.setTypeName( meta.getNameSpace().getPrefix()+":"+meta.getTypeName() );
}
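// Resolve the canonical typeRef (dataStoreId:typeName) and
// elementName (prefix:typeName) from the catalog metadata
//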
typeRef = meta.getDataStoreInfo().getId()+":"+meta.getTypeName();
elementName = meta.getNameSpace().getPrefix()+":"+meta.getTypeName();
LOGGER.fine("located FeatureType w/ typeRef '"+typeRef+"' and elementName '"+elementName+"'" );
if (stores.containsKey(elementName)) {
// typeName already loaded
continue;
}
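// Obtain the FeatureSource and, if it is writable, configure it
// with our transaction
//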
try {
FeatureSource source = meta.getFeatureSource();
if (source instanceof FeatureStore) {
FeatureStore store = (FeatureStore) source;
store.setTransaction(transaction);
stores.put( elementName, source );
stores2.put( typeRef, source );
} else {
throw new WfsTransactionException(elementName
+ " is read-only", element.getHandle(),
request.getHandle());
}
} catch (IOException ioException) {
throw new WfsTransactionException(elementName
+ " is not available:" + ioException,
element.getHandle(), request.getHandle());
}
}
// provide authorization for transaction
//
String authorizationID = request.getLockId();
if (authorizationID != null) {
if ((request.getWFS().getServiceLevel() & WFSDTO.SERVICE_LOCKING) == 0) {
// could we catch this during the handler, rather than during execution?
throw new ServiceException("Lock support is not enabled");
}
LOGGER.finer("got lockId: " + authorizationID);
if (!catalog.lockExists(authorizationID)) {
String mesg = "Attempting to use a lockID that does not exist"
+ ", it has either expired or was entered wrong.";
throw new WfsException(mesg);
}
try {
transaction.addAuthorization(authorizationID);
} catch (IOException ioException) {
// This is a real failure - not associated with a element
//
throw new WfsException("Authorization ID '" + authorizationID
+ "' not useable", ioException);
}
}
// execute elements in order,
// recording results as we go
//
// I will need to record the damaged area for
// pre commit validation checks
//
Envelope envelope = new Envelope();
for (int i = 0; i < request.getSubRequestSize(); i++) {
SubTransactionRequest element = request.getSubRequest(i);
// We expect element name to be of the format prefix:typeName
// We take care to force the insert element to have this format above.
//
String elementName = element.getTypeName();
String handle = element.getHandle();
FeatureStore store = (FeatureStore) stores.get(elementName);
if( store == null ){
throw new ServiceException( "Could not locate FeatureStore for '"+elementName+"'" );
}
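// the typeName as reported by the FeatureStore's own schema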
String typeName = store.getSchema().getTypeName();
if (element instanceof DeleteRequest) {
if ((request.getWFS().getServiceLevel() & WFSDTO.SERVICE_DELETE) == 0) {
// could we catch this during the handler, rather than during execution?
throw new ServiceException(
"Transaction Delete support is not enabled");
}
DeleteRequest delete = (DeleteRequest) element;
// Check for Filter.NONE; the spec specifically does not
// allow it
if (delete.getFilter() == Filter.NONE) {
throw new ServiceException(
"Filter must be supplied for Transaction Delete"
);
}
LOGGER.finer( "Transaction Delete:"+element );
try {
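// Record the area affected by this delete (before the features
// are removed) for the pre commit validation checks
//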
Filter filter = delete.getFilter();
Envelope damaged = store.getBounds(new DefaultQuery(
delete.getTypeName(), filter));
if (damaged == null) {
damaged = store.getFeatures(filter).getBounds();
}
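// releaseAction SOME: the locks held on the features we are about
// to remove need to be released as part of this delete
//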
if ((request.getLockId() != null)
&& store instanceof FeatureLocking
&& (request.getReleaseAction() == TransactionRequest.SOME)) {
FeatureLocking locking = (FeatureLocking) store;
// TODO: Revisit Lock/Delete interaction in gt2
if (false) {
// REVISIT: This is bad - by releasing locks before
// we remove features we open ourselves up to the danger
// of someone else locking the features we are about to
// remove.
//
// We cannot do it the other way round, as the Features will
// not exist
//
// We cannot grab the fids offline using AUTO_COMMIT
// because we may have removed some of them earlier in the
// transaction
//
locking.unLockFeatures(filter);
store.removeFeatures(filter);
} else {
// This is a bit better and what should be done; we will
// need to rework the gt2 locking api to work with
// fids or something
//
// The only other thing that would work would be
// to specify that FeatureLocking is required to
// remove locks when removing Features.
//
// While that sounds like a good idea, it would be
// extra work when doing release mode ALL.
//
DataStore data = store.getDataStore();
FilterFactory factory = FilterFactory
.createFilterFactory();
FeatureWriter writer;
writer = data.getFeatureWriter(typeName, filter,
transaction);
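// Walk the matching features with a FeatureWriter so each one
// can be unlocked individually before it is removed
//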
try {
while (writer.hasNext()) {
String fid = writer.next().getID();
locking.unLockFeatures(factory
.createFidFilter(fid));
writer.remove();
}
} finally {
writer.close();
}
store.removeFeatures(filter);
}
} else {
// We don't have to worry about locking right now
//
store.removeFeatures(filter);
}
envelope.expandToInclude(damaged);
} catch (IOException ioException) {
throw new WfsTransactionException(ioException.getMessage(),
element.getHandle(), request.getHandle());
}
}
if (element instanceof InsertRequest) {
if ((request.getWFS().getServiceLevel() & WFSDTO.SERVICE_INSERT) == 0) {
// could we catch this during the handler, rather than during execution?
throw new ServiceException(
"Transaction INSERT support is not enabled");
}
LOGGER.finer( "Transasction Insert:"+element );
try {
InsertRequest insert = (InsertRequest) element;
FeatureCollection collection = insert.getFeatures();
FeatureReader reader = DataUtilities.reader(collection);
FeatureType schema = store.getSchema();
// Need to use the namespace here for the lookup, due to our weird
// prefixed internal typenames. see
// http://jira.codehaus.org/secure/ViewIssue.jspa?key=GEOS-143
// Once we get our datastores making features with the correct namespaces
// we can do something like this:
// FeatureTypeInfo typeInfo = catalog.getFeatureTypeInfo(schema.getTypeName(), schema.getNamespace());
// until then (when geos-144 is resolved) we're stuck with:
FeatureTypeInfo typeInfo = catalog.getFeatureTypeInfo(element.getTypeName() );
// this is possible with the insert hack above.
LOGGER.finer("Use featureValidation to check contents of insert" );
featureValidation( typeInfo.getDataStoreInfo().getId(), schema, collection );
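// Perform the insert; the returned FeatureIDs are recorded
// for the transaction response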
Set fids = store.addFeatures(reader);
build.addInsertResult(element.getHandle(), fids);
//
// Add to validation check envelope
envelope.expandToInclude(collection.getBounds());
} catch (IOException ioException) {
throw new WfsTransactionException(ioException,
element.getHandle(), request.getHandle());
}
}
if (element instanceof UpdateRequest) {
if ((request.getWFS().getServiceLevel() & WFSDTO.SERVICE_UPDATE) == 0) {
// could we catch this during the handler, rather than during execution?
throw new ServiceException(
"Transaction Update support is not enabled");
}
LOGGER.finer( "Transaction Update:"+element);
try {
UpdateRequest update = (UpdateRequest) element;
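// Pull the filter, target attributes, and replacement values
// out of the update request
//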
Filter filter = update.getFilter();
AttributeType[] types = update.getTypes(store.getSchema());
Object[] values = update.getValues();
DefaultQuery query = new DefaultQuery(update.getTypeName(),
filter);
// Pass through data to collect fids and damaged region
// for validation
//
Set fids = new HashSet();
LOGGER.finer("Preprocess to remember modification as a set of fids" );
FeatureReader preprocess = store.getFeatures( filter ).reader();
try {
while( preprocess.hasNext() ){
Feature feature = preprocess.next();
fids.add( feature.getID() );
envelope.expandToInclude( feature.getBounds() );
}
} catch (NoSuchElementException e) {
throw new ServiceException( "Could not aquire FeatureIDs", e );
} catch (IllegalAttributeException e) {
throw new ServiceException( "Could not aquire FeatureIDs", e );
}
finally {
preprocess.close();
}
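// Apply the update; a single attribute uses the simpler
// modifyFeatures signature
//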
try{
if (types.length == 1) {
store.modifyFeatures(types[0], values[0], filter);
} else {
store.modifyFeatures(types, values, filter);
}
} catch (IOException e) {
// DJB: this is for the cite tests. We should probably do this for all the
// exceptions here - report a transaction FAILED instead of a service exception.
//
// This failed - we want a FAILED, not a service exception!
build = new WfsTransResponse(WfsTransResponse.FAILED,
transactionRequest.getGeoServer().isVerbose());
// add in exception details here??
build.setMessage(e.getLocalizedMessage());
response = build;
// DJB: it looks like the transaction is rolled back in writeTo()
return;
}
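// releaseAction SOME: release the locks held on the features
// matched by this update
//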
if ((request.getLockId() != null)
&& store instanceof FeatureLocking
&& (request.getReleaseAction() == TransactionRequest.SOME)) {
FeatureLocking locking = (FeatureLocking) store;
locking.unLockFeatures(filter);
}
// Post process - check features for changed boundary and
// pass them off to the ValidationProcessor
//
if( !fids.isEmpty() ) {
LOGGER.finer("Post process update for boundary update and featureValidation");
FidFilter modified = FilterFactory.createFilterFactory().createFidFilter();
modified.addAllFids( fids );
FeatureCollection changed = store.getFeatures( modified ).collection();
envelope.expandToInclude( changed.getBounds() );
FeatureTypeInfo typeInfo = catalog.getFeatureTypeInfo(element.getTypeName());
featureValidation(typeInfo.getDataStoreInfo().getId(),store.getSchema(), changed);
}
} catch (IOException ioException) {
throw new WfsTransactionException(ioException,
element.getHandle(), request.getHandle());
} catch (SchemaException typeException) {
throw new WfsTransactionException(typeName
+ " inconsistent with update:"
+ typeException.getMessage(), element.getHandle(),
request.getHandle());
}
}
}