// If the build produced no output for this node, still create its (empty)
// directory so the per-node output layout stays uniform for downstream readers.
if(!outputFs.exists(nodePath)) {
    logger.info("No data generated for node " + node.getId()
                + ". Generating empty folder");
    outputFs.mkdirs(nodePath); // Create empty folder
    // HADOOP_FILE_PERMISSION presumably encodes 755 (per the log line below) —
    // confirm against the constant's definition.
    outputFs.setPermission(nodePath, new FsPermission(HADOOP_FILE_PERMISSION));
    logger.info("Setting permission to 755 for " + nodePath);
}
// Fold the per-file checksum artifacts into a single node-level checksum,
// record it (plus the checksum type) in the node metadata, and delete the
// now-redundant per-file checksum files.
if(checkSumType != CheckSumType.NONE) {
    // Select only checksum artifacts; skip hidden ("."-prefixed) files.
    FileStatus[] storeFiles = outputFs.listStatus(nodePath, new PathFilter() {

        public boolean accept(Path arg0) {
            return arg0.getName().endsWith("checksum")
                   && !arg0.getName().startsWith(".");
        }
    });

    if(storeFiles != null && storeFiles.length > 0) {
        // Sort so the aggregate checksum is computed in a deterministic order.
        Arrays.sort(storeFiles, new IndexFileLastComparator());
        FSDataInputStream input = null;

        for(FileStatus file: storeFiles) {
            try {
                input = outputFs.open(file.getPath());
                byte fileCheckSum[] = new byte[CheckSum.checkSumLength(checkSumType)];
                // readFully instead of read(): a single read() may return fewer
                // bytes than requested, leaving part of the buffer zeroed and
                // silently corrupting the aggregate checksum.
                input.readFully(fileCheckSum);
                logger.debug("Checksum for file " + file.toString() + " - "
                             + new String(Hex.encodeHex(fileCheckSum)));
                checkSumGenerator.update(fileCheckSum);
            } catch(Exception e) {
                // Best effort: log and continue so one unreadable file does not
                // abort the whole node. NOTE(review): the aggregate checksum is
                // then incomplete — consider failing the build here instead.
                logger.error("Error while reading checksum file " + e.getMessage(),
                             e);
            } finally {
                if(input != null)
                    input.close();
            }
            // Per-file checksums are intermediate artifacts; remove them once
            // folded into the node-level checksum.
            outputFs.delete(file.getPath(), false);
        }
        metadata.add(ReadOnlyStorageMetadata.CHECKSUM_TYPE,
                     CheckSum.toString(checkSumType));

        String checkSum = new String(Hex.encodeHex(checkSumGenerator.getCheckSum()));
        logger.info("Checksum for node " + node.getId() + " - " + checkSum);
        metadata.add(ReadOnlyStorageMetadata.CHECKSUM, checkSum);
    }
}
// Write the node metadata (as JSON) to a hidden ".metadata" file in the
// node's output directory, with the standard output permission applied.
Path metadataPath = new Path(nodePath, ".metadata");
FSDataOutputStream metadataStream = outputFs.create(metadataPath);
outputFs.setPermission(metadataPath, new FsPermission(HADOOP_FILE_PERMISSION));
logger.info("Setting permission to 755 for " + metadataPath);
try {
    // NOTE(review): getBytes() uses the platform default charset; if readers
    // of .metadata assume UTF-8, an explicit charset should be passed —
    // confirm against the consumers before changing.
    metadataStream.write(metadata.toJsonString().getBytes());
    metadataStream.flush();
} finally {
    // Close in finally so the output stream is not leaked if write() throws.
    metadataStream.close();
}