// NOTE(review): this chunk is the tail of a test method whose signature is
// above the visible region. It exercises edit-log stream failover: read a
// few transactions, stop the journal node currently being tailed, and
// verify the failed URL stream is disabled while reads continue.

// Collect edit log input streams from the quorum journal manager starting
// at txid 0; the flags request in-progress segments as well
// (presumably (fromTxId, inProgressOk, validateInProgress) — TODO confirm
// against QuorumJournalManager#selectInputStreams).
List<EditLogInputStream> streams = Lists.newArrayList();
// get inprogress streams
qjm.selectInputStreams(streams, 0, true, false);
long lastReadTxId = -1;
// Tail from the first stream returned.
EditLogInputStream is = streams.get(0);
// Read three ops, tracking the last txid seen so we can later assert
// that reads resume contiguously after the failure.
for (int i = 0; i < 3; i++) {
FSEditLogOp op = is.readOp();
assertNotNull(op);
lastReadTxId = op.getTransactionId();
LOG.info("Read transaction: " + op + " with txid: "
+ op.getTransactionId());
}
// get the stream we are tailing from — tailing[0] is an out-parameter
// receiving the URLLogInputStream; the return value is its JournalNode
URLLogInputStream[] tailing = new URLLogInputStream[1];
JournalNode jn = getTailingJN(is, tailing);
// Remember the read position so we can refresh at exactly this point.
long position = is.getPosition();
// stop the node to simulate failure of the journal being tailed
jn.stopAndJoin(0);
// refresh the input stream at the saved position
is.refresh(position, 0);
LOG.info("Checking failed stream");
// this guy should be disabled
// its position should be fixed
URLLogInputStream urlis = tailing[0];
assertTrue(urlis.isDisabled());
assertEquals(position, urlis.getPosition());
// a disabled stream reports INVALID_TXID as its last txid
assertEquals(HdfsConstants.INVALID_TXID, urlis.getLastTxId());
// reading directly from the disabled stream must throw
try {
urlis.readOp();
fail("This read should fail");
} catch (IOException e) {
LOG.info("Expected exception: ", e);
} // expected
// reads should fall back to another stream
LOG.info("We should be able to read from the stream");
// Three more reads through the surviving journal(s); txids must continue
// contiguously (++lastReadTxId) from where we stopped before the failure.
for (int i = 0; i < 3; i++) {
FSEditLogOp op = is.readOp();
assertNotNull(op);
assertEquals(++lastReadTxId, op.getTransactionId());
LOG.info("Read transaction: " + op + " with txid: "
+ op.getTransactionId());
// Track the advancing position for the final refresh below.
position = is.getPosition();
}
LOG.info("Current state of the input stream: " + is.getName());
// refresh again — the failed stream must remain disabled and track the
// new position, still reporting no last txid
is.refresh(position, 0);
assertEquals(position, urlis.getPosition());
assertTrue(urlis.isDisabled());
assertEquals(HdfsConstants.INVALID_TXID, urlis.getLastTxId());
}