// only write out if something changed after the cache file was last written, or if the file has been deleted
if (!cacheDirty && cacheFile.exists())
return;
FileChannel channel = null;
try {
if (!cacheFile.exists()) {
File dir = cacheFile.getParentFile();
// mkdirs() returns false when the directory already exists, so only log a genuine failure
if (!dir.exists() && !dir.mkdirs())
logger.error("Can't make cache directory = " + dir);
}
// Get a file channel for the file
FileOutputStream fos = new FileOutputStream(cacheFile);
channel = fos.getChannel();
// Try acquiring the lock without blocking. This method returns
// null or throws an exception if the file is already locked.
FileLock lock;
try {
lock = channel.tryLock();
} catch (OverlappingFileLockException e) {
// File is already locked in this thread or virtual machine
return; // give up
}
if (lock == null) return; // another process holds the lock; give up
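// we hold the lock: write the aggregation state out as an NcML <aggregation> document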
// write in UTF-8 explicitly, to match the encoding declared in the XML header below
// (needs java.io.OutputStreamWriter and java.nio.charset.StandardCharsets)
PrintWriter out = new PrintWriter(new OutputStreamWriter(fos, StandardCharsets.UTF_8));
out.print("<?xml version='1.0' encoding='UTF-8'?>\n");
out.print("<aggregation xmlns='http://www.unidata.ucar.edu/namespaces/netcdf/ncml-2.2' version='3' ");
out.print("type='" + type + "' ");
if (dimName != null)
out.print("dimName='" + dimName + "' ");
if (datasetManager.getRecheck() != null)
out.print("recheckEvery='" + datasetManager.getRecheck() + "' ");
out.print(">\n");
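\n");">
// write one <netcdf> element per nested dataset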
List<Dataset> nestedDatasets = getDatasets();
for (Dataset dataset : nestedDatasets) {
DatasetOuterDimension dod = (DatasetOuterDimension) dataset;
if (dod.getId() == null) logger.warn("id is null for " + dataset.getLocation());
out.print(" <netcdf id='" + dod.getId() + "' ");
out.print("ncoords='" + dod.getNcoords(null) + "' >\n");
for (CacheVar pv : cacheList) {
Array data = pv.getData(dod.getId());
if (data != null) {
out.print(" <cache varName='" + pv.varName + "' >");
NCdumpW.printArray(data, out);
out.print("</cache>\n");
if (logger.isDebugEnabled())
logger.debug(" wrote array = " + pv.varName + " nelems= "+data.getSize()+" for "+dataset.getLocation());
}
}
out.print(" </netcdf>\n");
}
out.print("</aggregation>\n");
out.close(); // this also closes the channel and releases the lock
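// record the time of the last scan as the cache file's modification time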
cacheFile.setLastModified(datasetManager.getLastScanned());
cacheDirty = false;
if (logger.isDebugEnabled())
logger.debug("Aggregation persisted = " + cacheFile.getPath() + " lastModified= " + new Date(datasetManager.getLastScanned()));
} finally {
if (channel != null)
channel.close();
}
}