  final ExecutionUnitHelper helper = new ExecutionUnitHelper(unit);
  // extract record information
  SourceUri sourceUri = record.getSourceUri();
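  // obtain a request context; the null argument means no servlet request is associated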
  RequestContext context = RequestContext.extract(null);
  // get report builder
  ReportBuilder rp = helper.getReportBuilder();
  try {
    // if cleanup is enabled, immediately remove this record from the source URIs collection
    if (unit.getCleanupFlag()) {
      helper.getSourceUris().remove("uri", sourceUri.asString());
    }
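    // proceed only if the protocol permits updating the repository definition
    // (native records) or updating harvested content (all other records)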
    boolean proceed =
      (ProtocolInvoker.getUpdateDefinition(unit.getRepository().getProtocol()) && record instanceof Native)
      || (ProtocolInvoker.getUpdateContent(unit.getRepository().getProtocol()) && !(record instanceof Native));
    if (proceed) {
      // if there is a 'from-date' criterion, publish a non-native record only when
      // its last update date is unknown or not earlier than 'from-date'
      if (!(record instanceof Native) && unit.getCriteria().getFromDate() != null) {
        Date lastUpdateDate = record.getUpdateDate();
        if (lastUpdateDate != null && lastUpdateDate.before(unit.getCriteria().getFromDate())) {
          // the record predates 'from-date'; stop harvesting it
          proceed = false;
        }
      }
    }
    if (proceed) {
      String metadata = "";
      try {
        // read the metadata and notify listeners
        metadata = record.getContent();
        listener.onHarvestMetadata(unit.getRepository(), sourceUri, metadata);
        // build the publication request
        HarvesterRequest publicationRequest =
          new HarvesterRequest(context, unit.getPublisher(), unit.getRepository().getUuid(), sourceUri.asString(), metadata);
        publicationRequest.getPublicationRecord().setAutoApprove(ProtocolInvoker.getAutoApprove(unit.getRepository().getProtocol()));
        // if this is a repository descriptor, update the repository record itself
        if (record instanceof Native && isRepositorySourceUri(sourceUri, unit.getRepository())) {
          String sMethod = MmdEnums.PublicationMethod.registration.toString();
          publicationRequest.getPublicationRecord().setUuid(unit.getRepository().getUuid());
          publicationRequest.getPublicationRecord().setPublicationMethod(sMethod);
          publicationRequest.getPublicationRecord().setAlternativeTitle(unit.getRepository().getName());
          publicationRequest.getPublicationRecord().setLockTitle(ProtocolInvoker.getLockTitle(unit.getRepository().getProtocol()));
        }
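        // submit the request; failures surface as the exceptions handled below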
        publicationRequest.publish();
        // true when an existing document was replaced rather than newly created
        boolean bReplaced =
          publicationRequest.getPublicationRecord().getWasDocumentReplaced();
        LOGGER.finer("[SYNCHRONIZER] SUCCEEDED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri);
        // notify listeners
        listener.onPublishMetadata(unit.getRepository(), sourceUri, publicationRequest.getPublicationRecord().getUuid(), metadata);
        // create a harvest report entry for the current record
        rp.createEntry(sourceUri.asString(), !bReplaced);
      } catch (ValidationException ex) {
        if (LOGGER.isLoggable(Level.FINEST)) {
          LOGGER.log(Level.FINEST, "[SYNCHRONIZER] FAILED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri, ex);
        } else {
          LOGGER.finer("[SYNCHRONIZER] FAILED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri + ", details: " + ex.getMessage());
        }
        // expand the validation errors into individual report messages
        ArrayList<String> messages = new ArrayList<String>();
        ex.getValidationErrors().buildMessages(messageBroker, messages, true);
        rp.createInvalidEntry(sourceUri.asString(), messages);
        listener.onPublishException(unit.getRepository(), sourceUri, metadata, ex);
      } catch (IllegalArgumentException | ImsServiceException ex) {
        // publication could not be performed; report the record as unpublished
        if (LOGGER.isLoggable(Level.FINEST)) {
          LOGGER.log(Level.FINEST, "[SYNCHRONIZER] FAILED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri, ex);
        } else {
          LOGGER.finer("[SYNCHRONIZER] FAILED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri + ", details: " + ex.getMessage());
        }
        rp.createUnpublishedEntry(sourceUri.asString(), Arrays.asList(ex.getMessage()));
        listener.onPublishException(unit.getRepository(), sourceUri, metadata, ex);
      } catch (TransformerConfigurationException ex) {
        // a transformer configuration problem is not specific to this record; propagate it
        throw ex;
      } catch (NullReferenceException | SchemaException | SAXException | TransformerException | HttpClientException ex) {
        // the record could not be processed; report it as invalid
        if (LOGGER.isLoggable(Level.FINEST)) {
          LOGGER.log(Level.FINEST, "[SYNCHRONIZER] FAILED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri, ex);
        } else {
          LOGGER.finer("[SYNCHRONIZER] FAILED processing metadata #" + (rp.getHarvestedCount() + 1) + " through: " + unit + ", source URI: " + sourceUri + ", details: " + ex.getMessage());
        }
        rp.createInvalidEntry(sourceUri.asString(), Arrays.asList(ex.getMessage()));
        listener.onPublishException(unit.getRepository(), sourceUri, metadata, ex);
      }
    }
  } finally {
    // signal that this execution phase has finished so the context can release its resources
    context.onExecutionPhaseCompleted();
  }
}