// TODO(zundel): There is an optimal order to compile these modules in.
// Modify ModuleDefLoader to be able to figure that out and sort them for
// us.
for (String moduleToCompile : options.getModuleNames()) {
ModuleDef module;
// The units in this set already belong to an archive and should not be
// written out.
Set<String> currentModuleArchivedUnits = new HashSet<String>();
// Resolve the module definition from the classpath and rebuild the compiler
// context around it. Any failure here (including Errors) is reported through
// CompilationProblemReporter and aborts the entire compile run.
try {
module = ModuleDefLoader.loadFromClassPath(logger, compilerContext, moduleToCompile);
compilerContext = compilerContextBuilder.module(module).build();
} catch (Throwable e) {
CompilationProblemReporter.logAndTranslateException(logger, e);
return false;
}
// Load every dependency .gwtar archive visible to this module, timing the
// whole phase with a single LOAD_ARCHIVE SpeedTracer event (one nested event
// per individual archive below).
SpeedTracerLogger.Event loadAllArchives =
    SpeedTracerLogger.start(CompilerEventType.LOAD_ARCHIVE, "module", moduleToCompile);
try {
  Collection<URL> archiveURLs = module.getAllCompilationUnitArchiveURLs();
  if (logger.isLoggable(TreeLogger.TRACE) && archiveURLs != null) {
    for (URL archiveURL : archiveURLs) {
      logger.log(TreeLogger.TRACE, "Found archive: " + archiveURL);
    }
  }
  // Don't re-archive previously compiled units from this invocation of CompileModule.
  for (String compiledModuleName : newlyCompiledModules.keySet()) {
    if (module.isInherited(compiledModuleName)) {
      currentModuleArchivedUnits.addAll(newlyCompiledModules.get(compiledModuleName));
    }
  }
  // Load up previously archived modules. BUG FIX: the TRACE block above already
  // treats archiveURLs as nullable, but the original code iterated it here
  // unconditionally, risking a NullPointerException when no archives exist.
  if (archiveURLs != null) {
    for (URL archiveURL : archiveURLs) {
      String archiveURLString = archiveURL.toString();
      Set<String> unitPaths = unitsInArchives.get(archiveURLString);
      // Don't bother deserializing archives that have already been read.
      if (unitPaths != null) {
        currentModuleArchivedUnits.addAll(unitPaths);
        continue;
      }
      SpeedTracerLogger.Event loadArchive =
          SpeedTracerLogger.start(CompilerEventType.LOAD_ARCHIVE, "dependentModule", archiveURL
              .toString());
      try {
        CompilationUnitArchive archive = CompilationUnitArchive.createFromURL(archiveURL);
        // Pre-populate CompilationStateBuilder with .gwtar files
        CompilationStateBuilder.addArchive(compilerContext, archive);
        // Remember already archived units - we don't want to add them back.
        if (!archive.getTopModuleName().equals(moduleToCompile)) {
          Set<String> archivedUnits = new HashSet<String>();
          unitsInArchives.put(archiveURLString, archivedUnits);
          for (CompilationUnit unit : archive.getUnits().values()) {
            archivedUnits.add(unit.getResourcePath());
          }
          currentModuleArchivedUnits.addAll(archivedUnits);
        }
      } catch (IOException ex) {
        // Best-effort: an unreadable archive is skipped, not fatal.
        logger.log(TreeLogger.WARN, "Unable to read: " + archiveURL + ". Skipping: " + ex);
      } catch (ClassNotFoundException ex) {
        // Archive was serialized against a different/older class set; skip it.
        logger
            .log(TreeLogger.WARN, "Incompatible archive: " + archiveURL + ". Skipping: " + ex);
      } finally {
        loadArchive.end();
      }
    }
  }
} finally {
  loadAllArchives.end();
}
// Build (or fetch) the CompilationState for the module; this is where the
// actual Java compilation of non-archived units happens. Failures abort the run.
CompilationState compilationState;
try {
compilationState = module.getCompilationState(logger, compilerContext);
} catch (Throwable e) {
CompilationProblemReporter.logAndTranslateException(logger, e);
return false;
}
// In strict mode any unit-level compile error fails the whole invocation.
if (options.isStrict() && compilationState.hasErrors()) {
logger.log(TreeLogger.ERROR, "Failed to compile " + moduleToCompile);
return false;
}
// Stage every unit that is not already covered by an existing archive into a
// fresh output archive for this module.
Set<String> compiledUnits = Sets.newHashSet();
CompilationUnitArchive outputArchive = new CompilationUnitArchive(moduleToCompile);
for (CompilationUnit unit : compilationState.getCompilationUnits()) {
if (!currentModuleArchivedUnits.contains(unit.getResourcePath())) {
outputArchive.addUnit(unit);
compiledUnits.add(unit.getResourcePath());
}
}
// Record what this invocation compiled so later modules in the loop can skip it.
newlyCompiledModules.put(moduleToCompile, compiledUnits);
// Output path mirrors the dotted module name as a directory hierarchy, e.g.
// com.example.Foo -> com/example/Foo<suffix>.
String slashedModuleName =
module.getName().replace('.', '/') + ModuleDefLoader.COMPILATION_UNIT_ARCHIVE_SUFFIX;
File outputFile = new File(outputDir, slashedModuleName);
// NOTE(review): mkdirs() return value is ignored; a failure to create the
// directory would only surface as a write error below — confirm intended.
outputFile.getParentFile().mkdirs();
logger.log(TreeLogger.INFO, "Writing " + outputArchive.getUnits().size() + " units to "
+ outputFile.getAbsolutePath());
try {