public final HadoopFileSystemTestSupport hadoopTestSupport = new HadoopFileSystemTestSupport();
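
/**
 * Verify the ftphdfs job end-to-end: files served by an embedded FTP server
 * are transferred into HDFS and their contents are checked.
 */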
@Test
public void testLaunchFtpHadoopJob() throws Throwable {
    logger.info("Launch FTP->HDFS batch job");
    TestFtpServer server = new TestFtpServer("FtpHadoop");
    server.before();
    // clean up from old tests
    FileSystem fs = this.hadoopTestSupport.getResource();
    Path p1 = new Path("foo/ftpSource/ftpSource1.txt");
    fs.delete(p1, true);
    Path p2 = new Path("foo/ftpSource/ftpSource2.txt");
    fs.delete(p2, true);
    assertFalse(fs.exists(p1));
    assertFalse(fs.exists(p2));
    try {
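        // Create the job definition against the server's dynamically assigned
        // FTP port and confirm it shows up in the job list.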
        int port = server.getPort();
        executeJobCreate("myftphdfs", "ftphdfs --partitionResultsTimeout=120000 --port=" + port);
        checkForJobInList("myftphdfs", "ftphdfs --partitionResultsTimeout=120000 --port=" + port, true);
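        // Launch with JSON job parameters: pull from "ftpSource" on the FTP
        // server into the "foo" directory in HDFS.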
        executeJobLaunch("myftphdfs", "{\"remoteDirectory\":\"ftpSource\",\"hdfsDirectory\":\"foo\"}");
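        // Poll the job executions table until the most recent execution (row 0)
        // reports COMPLETED in the status column (index 5); fail after ~10 seconds.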
        Table jobExecutions = listJobExecutions();
        int n = 0;
        while (!"COMPLETED".equals(jobExecutions.getRows().get(0).getValue(5))) {
            Thread.sleep(100);
            assertTrue("job did not complete in time", n++ < 100);
            jobExecutions = listJobExecutions();
        }
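        // Both files should now be in HDFS; each holds exactly 7 bytes
        // ("source1" / "source2"), so one buffer can be reused for both reads.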
        assertTrue(fs.exists(p1));
        assertTrue(fs.exists(p2));
        FSDataInputStream stream = fs.open(p1);
        byte[] out = new byte[7];
        stream.readFully(out);
        stream.close();
        assertEquals("source1", new String(out));
        stream = fs.open(p2);
        stream.readFully(out);
        stream.close();
        assertEquals("source2", new String(out));
    }
    finally {
        // always release the HDFS test resource and stop the embedded FTP server
        this.hadoopTestSupport.cleanupResource();
        server.after();
    }
}