// Register a file-tailing adaptor (id "lr") on the test file, tailing from offset 0
// with data type "lines".
String adaptorId = agent
.processAddCommand("add lr =org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8"
+ " lines " + testFile + " 0");
assertTrue(adaptorId.equals("adaptor_lr"));
System.out.println("getting a chunk...");
// Block until a chunk arrives; discard any chunks whose data type is not "lines"
// (other adaptors/sources may be emitting on the same queue).
Chunk c = chunks.waitForAChunk();
System.out.println("got chunk");
while (!c.getDataType().equals("lines")) {
c = chunks.waitForAChunk();
}
// The chunk's sequence ID should match the file length — presumably the byte
// offset consumed, i.e. the whole file arrived in this one chunk.
assertTrue(c.getSeqID() == testFile.length());
// The test file is expected to contain exactly 80 records.
assertTrue(c.getRecordOffsets().length == 80);
int recStart = 0;
for (int rec = 0; rec < c.getRecordOffsets().length; ++rec) {
// getRecordOffsets()[rec] is treated as the inclusive offset of the record's
// last byte — hence "+ 1" for the length and "+ 1" to find the next start.
String record = new String(c.getData(), recStart,
c.getRecordOffsets()[rec] - recStart + 1);
System.out.println("record " + rec + " was: " + record);
// Each record is "<index> abcdefghijklmnopqrstuvwxyz\n".
assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
recStart = c.getRecordOffsets()[rec] + 1;
}
assertTrue(c.getDataType().equals("lines"));
// Simulate a log rotation: recreate the test file with 40 lines and verify the
// tailer picks up the new content.
testFile = makeTestFile("chukwaLogRotateTest", 40);
c = chunks.waitForAChunk();
System.out.println("got chunk");
// Again skip any non-"lines" chunks before checking the payload.
while (!c.getDataType().equals("lines")) {
c = chunks.waitForAChunk();
}
// NOTE(review): seqID check deliberately disabled after rotation — presumably the
// adaptor's sequence ID does not reset to the new file's length; confirm intended.
// assertTrue(c.getSeqID() == testFile.length());
// Post-rotation file holds 40 records; same record format as before.
assertTrue(c.getRecordOffsets().length == 40);
recStart = 0;
for (int rec = 0; rec < c.getRecordOffsets().length; ++rec) {
String record = new String(c.getData(), recStart,
c.getRecordOffsets()[rec] - recStart + 1);
System.out.println("record " + rec + " was: " + record);
assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
recStart = c.getRecordOffsets()[rec] + 1;
}
assertTrue(c.getDataType().equals("lines"));
// Teardown: stop the adaptor (second arg false — meaning not visible here,
// presumably "do not delete state/offset"; verify against AgentControl API),
// shut the agent down, and give background threads time to exit.
agent.stopAdaptor(adaptorId, false);
agent.shutdown();
Thread.sleep(2000);
}