}
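/**
 * Obtains the compiled view definition to use for the given valuation time and resolver
 * version/correction. A previously cached compilation is reused, in whole or in part, where it
 * is still valid; otherwise a full graph compilation is performed.
 *
 * @param valuationTime the valuation time the compilation must be valid for
 * @param versionCorrection the resolver version/correction to compile against
 * @return the compiled view definition, or null if the worker's job was terminated before
 *         compilation completed
 */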
private CompiledViewDefinitionWithGraphs getCompiledViewDefinition(final Instant valuationTime, final VersionCorrection versionCorrection) {
  final long functionInitId = getProcessContext().getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
  updateViewDefinitionIfRequired();
  CompiledViewDefinitionWithGraphs compiledViewDefinition = null;
  final Pair<Lock, Lock> executionCacheLocks = getProcessContext().getExecutionCacheLock().get(_executionCacheKey, valuationTime, versionCorrection);
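  // Two-stage locking: the second ("narrow") lock is held for the whole of this method, while the
  // first ("broad") lock may be released while a cached compilation is examined and is re-acquired
  // before any recompilation work. The broadLock flag tracks whether the first lock is held.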
  executionCacheLocks.getSecond().lock();
  executionCacheLocks.getFirst().lock();
  boolean broadLock = true;
  try {
    Map<String, Pair<DependencyGraph, Set<ValueRequirement>>> previousGraphs = null;
    ConcurrentMap<ComputationTargetReference, UniqueId> previousResolutions = null;
    Set<UniqueId> changedPositions = null;
    Set<UniqueId> unchangedNodes = null;
    if (!_forceGraphRebuild) {
      compiledViewDefinition = getCachedCompiledViewDefinition(valuationTime, versionCorrection);
      final boolean marketDataProviderDirty = _marketDataManager.isMarketDataProviderDirty();
      _marketDataManager.markMarketDataProviderClean();
      if (compiledViewDefinition != null) {
        executionCacheLocks.getFirst().unlock();
        broadLock = false;
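        // Single-pass do/while (false) block: "break" abandons the attempt at incremental reuse
        // and falls through to the full compilation below.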
        do {
          // The cast below is bad, but only temporary -- the function initialiser id needs to go
          if (functionInitId != ((CompiledViewDefinitionWithGraphsImpl) compiledViewDefinition).getFunctionInitId()) {
            // The function repository has been reinitialized, which invalidates any previous graphs
            // TODO: [PLAT-2237, PLAT-1623, PLAT-2240] Get rid of this
            break;
          }
          final Map<ComputationTargetReference, UniqueId> resolvedIdentifiers = compiledViewDefinition.getResolvedIdentifiers();
          // TODO: The check below works well for the historical valuation case, but if the resolver version/correction differs between two workers
          // in the group for an otherwise identical cache key then it may become necessary to include it in the caching detail to handle those cases.
          if (!versionCorrection.equals(compiledViewDefinition.getResolverVersionCorrection())) {
            final Map<UniqueId, ComputationTargetSpecification> invalidIdentifiers = getInvalidIdentifiers(resolvedIdentifiers, versionCorrection);
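            // A null result appears to mean that every previous resolution is still valid at the
            // new version/correction (see the else branch below), in which case the cached
            // compilation can simply be re-stamped.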
            if (invalidIdentifiers != null) {
              previousGraphs = getPreviousGraphs(previousGraphs, compiledViewDefinition);
              if ((compiledViewDefinition.getPortfolio() != null) && invalidIdentifiers.containsKey(compiledViewDefinition.getPortfolio().getUniqueId())) {
                // The portfolio resolution is different, so invalidate or rewrite the PORTFOLIO and PORTFOLIO_NODE nodes in the graph. Note that
                // incremental compilation under this circumstance can be flawed if the functions have made notable use of the overall portfolio
                // structure, such that a full re-compilation would yield a different dependency graph from merely rewriting the previous one.
                final ComputationTargetResolver resolver = getProcessContext().getFunctionCompilationService().getFunctionCompilationContext().getRawComputationTargetResolver();
                final ComputationTargetSpecification portfolioSpec = resolver.getSpecificationResolver().getTargetSpecification(
                    new ComputationTargetSpecification(ComputationTargetType.PORTFOLIO, getViewDefinition().getPortfolioId()), versionCorrection);
                final ComputationTarget newPortfolio = resolver.resolve(portfolioSpec, versionCorrection);
                unchangedNodes = rewritePortfolioNodes(previousGraphs, compiledViewDefinition, (Portfolio) newPortfolio.getValue());
              }
              // Invalidate any dependency graph nodes on the invalid targets
              filterPreviousGraphs(previousGraphs, new InvalidTargetDependencyNodeFilter(invalidIdentifiers.keySet()), unchangedNodes);
              previousResolutions = new ConcurrentHashMap<>(resolvedIdentifiers.size());
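              // Carry forward the resolutions that are still valid; record any position whose
              // resolution has changed so that the incremental compile can rebuild its sub-graphs.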
              for (final Map.Entry<ComputationTargetReference, UniqueId> resolvedIdentifier : resolvedIdentifiers.entrySet()) {
                if (invalidIdentifiers.containsKey(resolvedIdentifier.getValue())) {
                  if ((unchangedNodes == null) && resolvedIdentifier.getKey().getType().isTargetType(ComputationTargetType.POSITION)) {
                    // At least one position has changed; add all portfolio targets
                    final ComputationTargetSpecification ctspec = invalidIdentifiers.get(resolvedIdentifier.getValue());
                    if (ctspec != null) {
                      if (changedPositions == null) {
                        changedPositions = new HashSet<>();
                      }
                      changedPositions.add(ctspec.getUniqueId());
                    }
                  }
                } else {
                  previousResolutions.put(resolvedIdentifier.getKey(), resolvedIdentifier.getValue());
                }
              }
            } else {
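              // Nothing was invalidated; re-stamp the cached compilation with the new resolver
              // version/correction and cache it again.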
              compiledViewDefinition = compiledViewDefinition.withResolverVersionCorrection(versionCorrection);
              cacheCompiledViewDefinition(compiledViewDefinition);
            }
          }
          if (!CompiledViewDefinitionWithGraphsImpl.isValidFor(compiledViewDefinition, valuationTime)) {
            // Invalidate any dependency graph nodes that use functions that are no longer valid
            previousGraphs = getPreviousGraphs(previousGraphs, compiledViewDefinition);
            filterPreviousGraphs(previousGraphs, new InvalidFunctionDependencyNodeFilter(valuationTime), unchangedNodes);
          }
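          // The market data provider has changed since the compilation was cached, so nodes that
          // source market data may now resolve differently and must be invalidated.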
          if (marketDataProviderDirty) {
            previousGraphs = invalidateMarketDataSourcingNodes(previousGraphs, compiledViewDefinition, versionCorrection, unchangedNodes);
          }
          if (previousGraphs == null) {
            // Existing cached model is valid (an optimization for the common case of similar, increasing valuation times)
            return compiledViewDefinition;
          }
          if (previousResolutions == null) {
            previousResolutions = new ConcurrentHashMap<>(resolvedIdentifiers);
          }
        } while (false);
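        // Re-acquire the broad lock before performing any compilation work; it was released
        // above while the cached compilation was being examined.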
        executionCacheLocks.getFirst().lock();
        broadLock = true;
      }
    }
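    // Fall through to compilation: incremental if any graphs were salvaged above, full otherwise.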
    final MarketDataAvailabilityProvider availabilityProvider = _marketDataManager.getAvailabilityProvider();
    final ViewCompilationServices compilationServices = getProcessContext().asCompilationServices(availabilityProvider);
    if (previousGraphs != null) {
      s_logger.info("Performing incremental graph compilation");
      _compilationTask = ViewDefinitionCompiler.incrementalCompileTask(getViewDefinition(), compilationServices, valuationTime, versionCorrection, previousGraphs, previousResolutions,
          changedPositions, unchangedNodes);
    } else {
      s_logger.info("Performing full graph compilation");
      _compilationTask = ViewDefinitionCompiler.fullCompileTask(getViewDefinition(), compilationServices, valuationTime, versionCorrection);
    }
    try {
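      // Abandon the compilation if the worker's job has been terminated in the meantime.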
      if (!getJob().isTerminated()) {
        compiledViewDefinition = _compilationTask.get();
        final ComputationTargetResolver.AtVersionCorrection resolver = getProcessContext().getFunctionCompilationService().getFunctionCompilationContext().getRawComputationTargetResolver()
            .atVersionCorrection(versionCorrection);
        compiledViewDefinition = initialiseMarketDataManipulation(compiledViewDefinition, resolver);
        cacheCompiledViewDefinition(compiledViewDefinition);
      } else {
        return null;
      }
    } finally {
      _compilationTask = null;
    }
  } catch (final Exception e) {
    final String message = MessageFormat.format("Error compiling view definition {0} for time {1}",
        getViewDefinition().getUniqueId(),
        valuationTime);
    viewDefinitionCompilationFailed(valuationTime, new OpenGammaRuntimeException(message, e));
    throw new OpenGammaRuntimeException(message, e);
  } finally {
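    // Release the locks in reverse acquisition order; the broad (first) lock may already have
    // been released while the cached compilation was examined.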
    _forceGraphRebuild = false;
    if (broadLock) {
      executionCacheLocks.getFirst().unlock();
    }
    executionCacheLocks.getSecond().unlock();
  }
  // [PLAT-984]
  // Assume that valuation times are increasing in real-time towards the expiry of the view definition, so that we
  // can predict the time to expiry. If this assumption is wrong then the worst we do is trigger an unnecessary
  // cycle. In the predicted case, we trigger a cycle on expiry so that any new market data subscriptions are made
  // straight away.
  if ((compiledViewDefinition.getValidTo() != null) && getExecutionOptions().getFlags().contains(ViewExecutionFlags.TRIGGER_CYCLE_ON_MARKET_DATA_CHANGED)) {
    final Duration durationToExpiry = _marketDataManager.getMarketDataProvider().getRealTimeDuration(valuationTime,
        compiledViewDefinition.getValidTo());
    final long expiryNanos = System.nanoTime() + durationToExpiry.toNanos();
    _compilationExpiryCycleTrigger.set(expiryNanos, ViewCycleTriggerResult.forceFull());
    // REVIEW Andrew 2012-11-02 -- If we are ticking live, then this is almost right (System.nanoTime will be close to valuationTime, depending on how
    // long the compilation took). If we are running through historical data then this is quite a meaningless trigger.
  } else {