// Seed shard 2's "mongo_hadoop.yield_historical.in" collection directly
// (bypassing mongos), so the input data lives on a known shard.
DBCollection destination = getShard2().getDB("mongo_hadoop").getCollection("yield_historical.in");
for (DBObject doc : data) {
// NOTE(review): UNACKNOWLEDGED makes these inserts fire-and-forget; the job
// below assumes every write has been applied before it reads. Presumably the
// same-connection ordering makes this safe here — confirm, or a slow flush
// could make the first comparison flaky.
destination.insert(doc, WriteConcern.UNACKNOWLEDGED);
}
// First run: read the sharded input with secondary read preference, allowing
// slaveOk reads and splitting per shard (SPLITS_USE_SHARDS=true) rather than
// per chunk (SPLITS_USE_CHUNKS=false).
new MapReduceJob(TreasuryYieldXMLConfig.class.getName())
.jar(JOBJAR_PATH)
.param(SPLITS_SLAVE_OK, "true")
.param(SPLITS_USE_SHARDS, "true")
.param(SPLITS_USE_CHUNKS, "false")
.inputUris(new MongoClientURIBuilder(getInputUri()).readPreference(ReadPreference.secondary()).build())
.execute(isRunTestInVm());
// Verify the job's output against the reference data, then drop the output
// collection so the second run starts clean.
compareResults(collection, getReference());
collection.drop();
// Second run: identical job but splitting by chunks (SPLITS_USE_CHUNKS=true).
// NOTE(review): this builder chain continues past the end of the visible
// fragment — its .execute(...) call is presumably on the following lines.
new MapReduceJob(TreasuryYieldXMLConfig.class.getName())
.jar(JOBJAR_PATH)
.inputUris(new MongoClientURIBuilder(getInputUri()).readPreference(ReadPreference.secondary()).build())
.param(SPLITS_SLAVE_OK, "true")
.param(SPLITS_USE_SHARDS, "true")
.param(SPLITS_USE_CHUNKS, "true")