}
/**
 * Configures this node from the given node configuration: loads the
 * contributions listed in the configuration into a fresh workspace,
 * aggregates every definitions.xml artifact into the system definitions,
 * resolves the system contribution and all contribution dependencies,
 * and then locates the deployable composite inside the contributions.
 *
 * NOTE(review): the method continues beyond the visible region; the
 * comments below cover only the lines shown here.
 *
 * @param configuration the node configuration listing the contributions
 *        to load and (optionally) the composite to run
 * @throws Exception if a contribution cannot be read or resolved, or if
 *         any later configuration step fails
 */
private void configureNode(ConfiguredNodeImplementation configuration) throws Exception {
// Create workspace model
Workspace workspace = workspaceFactory.createWorkspace();
// The extensible resolver delegates to the registered per-model resolvers.
workspace.setModelResolver(new ExtensibleModelResolver(workspace, modelResolvers, modelFactories));
// Load the specified contributions
for (Contribution c : configuration.getContributions()) {
URI contributionURI = URI.create(c.getURI());
URI uri = createURI(c.getLocation());
// A location with no URI scheme is treated as a local filesystem path
// and converted to a file: URI.
if (uri.getScheme() == null) {
uri = new File(c.getLocation()).toURI();
}
URL contributionURL = uri.toURL();
// Load the contribution
logger.log(Level.INFO, "Loading contribution: " + contributionURL);
Contribution contribution = contributionProcessor.read(null, contributionURI, contributionURL);
workspace.getContributions().add(contribution);
// Fail fast on any problems reported while reading this contribution
// (analyzeProblems is defined elsewhere in this class — presumably it
// inspects the monitor and throws; confirm against the full file).
analyzeProblems();
}
// Build an aggregated SCA definitions model. Must be done before we try and
// resolve any contributions or composites as they may depend on the full
// definitions.xml picture
// get all definitions.xml artifacts from contributions and aggregate
// into the system contribution. In turn add a default import into
// each contribution so that for unresolved items the resolution
// processing will look in the system contribution
for (Contribution contribution: workspace.getContributions()) {
// aggregate definitions
for (Artifact artifact : contribution.getArtifacts()) {
Object model = artifact.getModel();
if (model instanceof Definitions) {
DefinitionsUtil.aggregate((Definitions)model, systemDefinitions);
}
}
// create a default import and wire it up to the system contribution
// model resolver. This is the trick that makes the resolution processing
// skip over to the system contribution if resolution is unsuccessful
// in the current contribution
DefaultImport defaultImport = contributionFactory.createDefaultImport();
defaultImport.setModelResolver(systemContribution.getModelResolver());
contribution.getImports().add(defaultImport);
}
// now resolve the system contribution and add the contribution
// to the workspace
contributionProcessor.resolve(systemContribution, workspace.getModelResolver());
workspace.getContributions().add(systemContribution);
// TODO - Now we can calculate applicable policy sets for each composite
// Build the contribution dependencies
// The 'resolved' set guards against resolving the same dependency more
// than once when multiple contributions share it.
Set<Contribution> resolved = new HashSet<Contribution>();
for (Contribution contribution: workspace.getContributions()) {
contributionDependencyBuilder.build(contribution, workspace, monitor);
// Resolve contributions
for (Contribution dependency: contribution.getDependencies()) {
if (!resolved.contains(dependency)) {
resolved.add(dependency);
contributionProcessor.resolve(dependency, workspace.getModelResolver());
}
}
}
// If the configuration names no composite, fall back to a default one
// derived from the configuration/workspace (setDefaultComposite is
// defined elsewhere in this class and is expected to assign 'composite').
composite = configuration.getComposite();
if (composite == null) {
setDefaultComposite(configuration, workspace);
}
// Find the composite in the given contributions
// Build an unresolved artifact keyed by the composite's URI and ask each
// contribution's model resolver to resolve it.
boolean found = false;
Artifact compositeFile = contributionFactory.createArtifact();
compositeFile.setUnresolved(true);
compositeFile.setURI(composite.getURI());
for (Contribution contribution: workspace.getContributions()) {
ModelResolver resolver = contribution.getModelResolver();
// for (Artifact artifact : contribution.getArtifacts()){
// logger.log(Level.INFO,"artifact - " + artifact.getURI());
// }
Artifact resolvedArtifact = resolver.resolveModel(Artifact.class, compositeFile);