int numberOfShardsToMergePerPass = paths.length / newShardCount;
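// Each of the newShardCount base shards absorbs (numberOfShardsToMergePerPass - 1) of the remaining shards.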
for (int i = 0; i < newShardCount; i++) {
System.out.println("Base Index [" + paths[i] + "]");
IndexWriterConfig lconf = new IndexWriterConfig(LUCENE_VERSION, new KeywordAnalyzer());
lconf.setCodec(new Blur022Codec());
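  // Open a writer on the base shard's directory; the remaining shards are merged into it below.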
  HdfsDirectory dir = new HdfsDirectory(getConf(), paths[i]);
  IndexWriter indexWriter = new IndexWriter(dir, lconf);
  Directory[] dirs = new Directory[numberOfShardsToMergePerPass - 1];
  Path[] pathsToDelete = new Path[numberOfShardsToMergePerPass - 1];
  for (int p = 1; p < numberOfShardsToMergePerPass; p++) {
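    // Shards are striped across the new layout: base shard i absorbs the shards at
    // index i + newShardCount, i + 2 * newShardCount, and so on.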
    Path pathToMerge = paths[i + p * newShardCount];
    System.out.println("Merge [" + pathToMerge + "]");
    dirs[p - 1] = new HdfsDirectory(getConf(), pathToMerge);
    pathsToDelete[p - 1] = pathToMerge;
  }
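  // addIndexes copies the segments of every extra shard directory into the base index.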
  indexWriter.addIndexes(dirs);
  // Causes a rewrite of the index, and the symlinked files are
  // merged/rewritten.