//
// Create some content in the default table of the default repository
//
LRepository repository = repoMgr.getDefaultRepository();
TypeManager typeManager = repository.getTypeManager();
IdGenerator idGenerator = repository.getIdGenerator();
LTable table = repository.getDefaultTable();
// Minimal schema: one non-versioned STRING field in namespace "test",
// wired into record type "rt1".
FieldType ft1 = typeManager.createFieldType("STRING", new QName("test", "field1"), Scope.NON_VERSIONED);
RecordType rt1 = typeManager.recordTypeBuilder()
.defaultNamespace("test")
.name("rt1")
.fieldEntry().use(ft1).add()
.create();
// Create 100 records with zero-padded ids ("000".."099") so the ids sort
// lexicographically in numeric order; the custom-scan job further down relies
// on this to select a contiguous id range.
for (int i = 0; i < 100; i++) {
table.recordBuilder()
.id(String.format("%1$03d", i))
.recordType(rt1.getName())
.field(ft1.getName(), "foo bar bar")
.create();
}
//
// Also create some content in another repository with two tables
//
RepositoryModelImpl repositoryModel = new RepositoryModelImpl(lilyProxy.getLilyServerProxy().getZooKeeper());
String repositoryName = "otherrepo";
repositoryModel.create(repositoryName);
// Repository creation is asynchronous: block (up to 60s) until it reaches ACTIVE.
assertTrue(repositoryModel.waitUntilRepositoryInState(repositoryName, RepositoryLifecycleState.ACTIVE, 60000L));
// NOTE(review): repositoryModel leaks if create()/waitUntil... throws —
// consider try/finally or try-with-resources if RepositoryModelImpl is AutoCloseable.
repositoryModel.close();
LRepository repository2 = repoMgr.getRepository(repositoryName);
repository2.getTableManager().createTable("foobar");
LTable table2 = repository2.getTable("foobar");
LTable table3 = repository2.getDefaultTable();
// 50 records in the custom "foobar" table (same record type/field as above) ...
for (int i = 0; i < 50; i++) {
table2.recordBuilder()
.id(String.valueOf(i))
.recordType(rt1.getName())
.field(ft1.getName(), "foo bar bar")
.create();
}
// ... and 20 in this repository's default table, so the two tables (and the two
// repositories) have distinct record counts.
for (int i = 0; i < 20; i++) {
table3.recordBuilder()
.id(String.valueOf(i))
.recordType(rt1.getName())
.field(ft1.getName(), "foo bar bar")
.create();
}
//
// Launch MapReduce job on default repository
//
{
// Map-only job over every record of the default repository's default table;
// output is discarded (NullOutputFormat), we only check the job counters.
Configuration config = HBaseConfiguration.create();
config.set("mapred.job.tracker", "localhost:8021");
config.set("fs.defaultFS", "hdfs://localhost:8020");
// Job.getInstance(...) replaces the Job(Configuration, String) constructor,
// which is deprecated since Hadoop 2 (the Hadoop-2-style fs.defaultFS key is
// already used above).
Job job = Job.getInstance(config, "Test1");
job.setJarByClass(Test1Mapper.class);
job.setMapperClass(Test1Mapper.class);
job.setOutputFormatClass(NullOutputFormat.class);
job.setNumReduceTasks(0); // map-only
// A null scan means: read all records of the repository's record table.
LilyMapReduceUtil.initMapperJob(null, "localhost", repository, job);
boolean success = job.waitForCompletion(true);
if (!success) {
throw new IOException("error with job!");
}
// Verify some counters: all 100 records created above must have been read.
// NOTE(review): the expected 5 map tasks presumably reflects the table's
// region/split layout in this test setup — confirm against the proxy config.
assertEquals("Number of launched map tasks", 5L, getTotalLaunchedMaps(job));
assertEquals("Number of input records", 100L, getTotalInputRecords(job));
}
//
// Launch a job with a custom scan on the default repository
//
{
// Same map-only job shape as above, but restricted to an id range via a RecordScan.
Configuration config = HBaseConfiguration.create();
config.set("mapred.job.tracker", "localhost:8021");
config.set("fs.defaultFS", "hdfs://localhost:8020");
Job job = new Job(config, "Test1");
job.setJarByClass(Test1Mapper.class);
job.setMapperClass(Test1Mapper.class);
job.setOutputFormatClass(NullOutputFormat.class);
job.setNumReduceTasks(0);
// Scan from record id "015" to "025"; the ids were created zero-padded above,
// so this lexicographic range matches the numeric one. Presumably the stop id
// is exclusive (usual HBase scan convention) — confirm against RecordScan docs.
RecordScan scan = new RecordScan();
scan.setStartRecordId(idGenerator.newRecordId(String.format("%1$03d", 15)));
scan.setStopRecordId(idGenerator.newRecordId(String.format("%1$03d", 25)));
LilyMapReduceUtil.initMapperJob(scan, "localhost", repository, job);
boolean b = job.waitForCompletion(true);
if (!b) {