    if(fs.exists(outputDir)) {
        info("Deleting previous output in " + outputDir + " for building store " + storeName);
        fs.delete(outputDir, true);
    }
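
    // A configured chunk count of -1 means "not set": use the constructor that
    // takes a target chunkSize and let the builder derive the number of chunks.
    // Otherwise, pass the explicit chunk count through.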
    HadoopStoreBuilder builder = null;
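
    // Avro input uses the Avro-specific mapper and input format; the raw cast on
    // AvroInputFormat.class bridges the generic InputFormat type the builder expects.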
    if(isAvro) {
        if(conf.getNumChunks() == -1) {
            builder = new HadoopStoreBuilder(configuration,
                                             AvroStoreBuilderMapper.class,
                                             (Class<? extends InputFormat>) AvroInputFormat.class,
                                             cluster,
                                             storeDef,
                                             chunkSize,
                                             tempDir,
                                             outputDir,
                                             inputPath,
                                             checkSumType,
                                             saveKeys,
                                             reducerPerBucket);
        } else {
            builder = new HadoopStoreBuilder(configuration,
                                             AvroStoreBuilderMapper.class,
                                             (Class<? extends InputFormat>) AvroInputFormat.class,
                                             cluster,
                                             storeDef,
                                             tempDir,
                                             outputDir,
                                             inputPath,
                                             checkSumType,
                                             saveKeys,
                                             reducerPerBucket,
                                             conf.getNumChunks());
        }
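
        // Avro stores have a dedicated build entry point; return early so the
        // JSON path below is not run as well.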
        builder.buildAvro();
        return;
    }
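
    // Default path: input is JSON-serialized data in Hadoop sequence files.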
    if(conf.getNumChunks() == -1) {
        builder = new HadoopStoreBuilder(configuration,
                                         VoldemortStoreBuilderMapper.class,
                                         JsonSequenceFileInputFormat.class,
                                         cluster,
                                         storeDef,
                                         chunkSize,
                                         tempDir,
                                         outputDir,
                                         inputPath,
                                         checkSumType,
                                         saveKeys,
                                         reducerPerBucket);
    } else {
        builder = new HadoopStoreBuilder(configuration,
                                         VoldemortStoreBuilderMapper.class,
                                         JsonSequenceFileInputFormat.class,
                                         cluster,
                                         storeDef,
                                         tempDir,
                                         outputDir,
                                         inputPath,
                                         checkSumType,
                                         saveKeys,
                                         reducerPerBucket,
                                         conf.getNumChunks());
    }
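
    // Launch the MapReduce job that builds the read-only store files.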
    builder.build();
}