// Cap events per flush and select the fault-injecting "bad" sequence-file
// writer so this test exercises the sink's error-recovery path.
context.put("hdfs.batchSize", String.valueOf(batchSize));
context.put("hdfs.fileType", HDFSBadWriterFactory.BadSequenceFileType);
Configurables.configure(sink, context);
// Back the sink with an in-memory channel configured with defaults.
MemoryChannel channel = new MemoryChannel();
Configurables.configure(channel, new Context());
sink.setChannel(channel);
sink.start();
Calendar eventDate = Calendar.getInstance();
// Collect every event body we enqueue so the caller can later verify
// what the sink actually wrote.
List<String> bodies = Lists.newArrayList();
// push the event batches into channel
// NOTE(review): loop starts at 1 and uses '<', so only numBatches - 1
// batches are pushed — confirm this off-by-one is intentional.
for (i = 1; i < numBatches; i++) {
channel.getTransaction().begin();
try {
for (j = 1; j <= txnMax; j++) {
Event event = new SimpleEvent();
eventDate.clear();
// 5-arg Calendar.set is (year, month, day, hourOfDay, minute);
// month is 0-based, so batch i stamps day i of month i+1, 2011.
eventDate.set(2011, i, i, i, 0); // (year, month, day, hour, minute)
event.getHeaders().put("timestamp",
String.valueOf(eventDate.getTimeInMillis()));
event.getHeaders().put("hostname", "Host" + i);
// Body encodes batch and sequence numbers, e.g. "Test.2.5".
String body = "Test." + i + "." + j;
event.setBody(body.getBytes());
bodies.add(body);
// inject fault — presumably this header instructs the bad writer to
// keep failing until the sink closes and reopens the file; confirm
// against HDFSBadWriterFactory.
event.getHeaders().put("fault-until-reopen", "");
channel.put(event);
}
channel.getTransaction().commit();
} finally {
// Always close the transaction, even if commit() throws.
channel.getTransaction().close();
}
// Drain the batch just committed; process() returns the sink's status.
LOG.info("execute sink to process the events: " + sink.process());
}
// One extra process() call to flush events left behind by injected faults.
LOG.info("clear any events pending due to errors: " + sink.process());
sink.stop();