{
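    // Skip this test entirely on platforms it does not support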
    if (!validPlatform()) return;
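    // Create a job configuration, then serialise the index and the name
    // of the target meta structure into it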
    JobConf jc = HadoopPlugin.getJobFactory(this.getClass().getName()).newJob();
    HadoopUtility.toHConfiguration(index, jc);
    CompressingMetaIndexInputFormat.setStructure(jc, name);
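    // Instantiate the input format, validate the job configuration, and
    // override the data file block size used when calculating input splits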
    CompressingMetaIndexInputFormat information = new CompressingMetaIndexInputFormat();
    information.validateInput(jc);
    information.overrideDataFileBlockSize(blocksize);
    InputSplit[] splits = information.getSplits(jc, 2);
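    // Track the docnos we have yet to encounter, and count the documents
    // returned across all splits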
    Set<String> unseenDocnos = new HashSet<String>(Arrays.asList(docnos));
    int seenDocuments = 0;
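    // Consume every record of every split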
    for (InputSplit split : splits)
    {
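        // Each record's key is a document id; its value wraps the metadata
        // values for that document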
        // Pass the no-op org.apache.hadoop.mapred.Reporter.NULL rather than
        // null, avoiding a possible NullPointerException in the reader
        RecordReader<IntWritable, Wrapper<String[]>> rr = information.getRecordReader(split, jc, Reporter.NULL);
        IntWritable key = rr.createKey();
        Wrapper<String[]> value = rr.createValue();
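        // Advance through each (docid, metadata) record in this split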
        while (rr.next(key, value))
        {
            seenDocuments++;