// 1) Just one correct file
fs.create(testPath);
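// junkPath is a decoy that getDataChunkFiles() is expected to skip when asked for partition 0, replica 0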
fs.create(junkPath);
writeRandomData(testPath, 100);
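// writeRandomData is a test helper that fills the file with the given number of bytes,
// so chunk ordering can be verified below through getDataFileSize()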
DataFileChunkSet set = HadoopStoreBuilderUtils.getDataFileChunkSet(fs,
        HadoopStoreBuilderUtils.getDataChunkFiles(fs, headPath, 0, 0));
assertEquals(set.getNumChunks(), 1);
assertEquals(set.getDataFileSize(0), 100);
// 2) Another correct file
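// File names are assumed to follow the <partition>_<replica>_<chunk id>.data convention,
// so this adds chunk 99 for partition 0, replica 0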
testPath = new Path(headPath, "0_0_99.data");
fs.create(testPath);
writeRandomData(testPath, 99);
set = HadoopStoreBuilderUtils.getDataFileChunkSet(fs,
        HadoopStoreBuilderUtils.getDataChunkFiles(fs, headPath, 0, 0));
assertEquals(set.getNumChunks(), 2);
assertEquals(set.getDataFileSize(0), 99);
assertEquals(set.getDataFileSize(1), 100);
// 3) Add some more files
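// Chunk ids 1, 10, 101, 999 and 1000 interleave lexicographically with the existing files;
// the size assertions below only hold if the chunks come back ordered numerically by chunk id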
testPath = new Path(headPath, "0_0_1.data");
fs.create(testPath);
writeRandomData(testPath, 1);
testPath = new Path(headPath, "0_0_10.data");
fs.create(testPath);
writeRandomData(testPath, 10);
testPath = new Path(headPath, "0_0_999.data");
fs.create(testPath);
writeRandomData(testPath, 999);
testPath = new Path(headPath, "0_0_101.data");
fs.create(testPath);
writeRandomData(testPath, 101);
testPath = new Path(headPath, "0_0_1000.data");
fs.create(testPath);
writeRandomData(testPath, 1000);
set = HadoopStoreBuilderUtils.getDataFileChunkSet(fs,
        HadoopStoreBuilderUtils.getDataChunkFiles(fs, headPath, 0, 0));
assertEquals(set.getNumChunks(), 7);
assertEquals(set.getDataFileSize(0), 1);
assertEquals(set.getDataFileSize(1), 10);
assertEquals(set.getDataFileSize(2), 99);
assertEquals(set.getDataFileSize(3), 100);
assertEquals(set.getDataFileSize(4), 101);
assertEquals(set.getDataFileSize(5), 999);
assertEquals(set.getDataFileSize(6), 1000);
}