String dir = ensureTrailingSlash(path);
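// ensureTrailingSlash is not shown here; it is assumed to normalize the
// install path, roughly (sketch):
//   private static String ensureTrailingSlash(String p) {
//     return p.endsWith("/") ? p : p + "/";
//   }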
log.log(Level.FINER, "Computed Server URL", new Object[] { dir, user, hostname });
HadoopServer server = ServerRegistry.getInstance().getServer(serverid);
try {
Session session = server.createSession();
// session.setTimeout(TIMEOUT);
log.log(Level.FINER, "Connected");
/*
 * COMMENTED(jz) removing server start/stop support for now
 * if (!attributes.containsKey("hadoop.jar")) {
 *   // start or stop server
 *   if (server.getServerState() == IServer.STATE_STARTING) {
 *     String command = dir + "bin/start-all.sh";
 *     execInConsole(session, command);
 *   } else if (server.getServerState() == IServer.STATE_STOPPING) {
 *     String command = dir + "bin/stop-all.sh";
 *     execInConsole(session, command);
 *   }
 * }
 */
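// NOTE: the "if (false)" scaffolding below is left over from the disabled
// start/stop support above; only the else branch is ever taken.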
if (false) {
} else {
FileInputStream fis = null;
String jarFile, remoteFile = null;
if (attributes.containsKey("hadoop.jar")) {
jarFile = (String) attributes.get("hadoop.jar");
} else {
String memento = (String) attributes.get("hadoop.jarrable");
JarModule fromMemento = JarModule.fromMemento(memento);
jarFile = fromMemento.buildJar(new SubProgressMonitor(monitor, 100)).toString();
}
if (jarFile.lastIndexOf('/') >= 0) {
remoteFile = jarFile.substring(jarFile.lastIndexOf('/') + 1);
} else if (jarFile.lastIndexOf('\\') >= 0) {
remoteFile = jarFile.substring(jarFile.lastIndexOf('\\') + 1);
} else {
// no path separator: the jar path is already a bare file name
remoteFile = jarFile;
}
// exec 'scp -t -p hadoop.jar' remotely
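// ("-t" runs the remote scp in sink/"to" mode so it receives the file;
// "-p" asks it to preserve modification times and mode)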
String command = "scp -p -t " + remoteFile;
Channel channel = session.openChannel("exec");
((ChannelExec) channel).setCommand(command);
// get I/O streams for remote scp
OutputStream out = channel.getOutputStream();
final InputStream in = channel.getInputStream();
channel.connect();
if (checkAck(in) != 0) {
throw new CoreException(SSH_FAILED_STATUS1);
}
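// checkAck (not shown here) reads the single acknowledgement byte the remote
// scp sends after each protocol step: 0 = success, 1 = warning, 2 = fatal
// error. A minimal sketch, assuming the usual JSch ScpTo-style helper:
//   static int checkAck(InputStream in) throws IOException {
//     int b = in.read(); // -1 means the stream closed unexpectedly
//     if (b == 1 || b == 2) {
//       // a warning/error status byte is followed by a message line
//       StringBuilder sb = new StringBuilder();
//       int c;
//       while ((c = in.read()) != '\n' && c != -1) {
//         sb.append((char) c);
//       }
//       log.warning(sb.toString());
//     }
//     return b;
//   }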
// send "C0644 filesize filename", where filename should not
// include '/'
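// ("C0644" announces a regular file with mode 0644, followed by its size in
// bytes and its bare name; the remote scp creates the file from this header)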
long filesize = (new File(jarFile)).length();
command = "C0644 " + filesize + " ";
if (jarFile.lastIndexOf('/') > 0) {
command += jarFile.substring(jarFile.lastIndexOf('/') + 1);
} else {
command += jarFile;
}
command += "\n";
out.write(command.getBytes());
out.flush();
if (checkAck(in) != 0) {
throw new CoreException(SSH_FAILED_STATUS2);
}
// send the content of jarFile
fis = new FileInputStream(jarFile);
byte[] buf = new byte[1024];
while (true) {
int len = fis.read(buf, 0, buf.length);
if (len <= 0) {
break;
}
out.write(buf, 0, len); // out.flush();
}
fis.close();
fis = null;
// send '\0'
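// (a single zero byte after the file data tells the remote scp that the
// transfer of this file is complete)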
buf[0] = 0;
out.write(buf, 0, 1);
out.flush();
if (checkAck(in) != 0) {
throw new CoreException(SSH_FAILED_STATUS3);
}
out.close();
channel.disconnect();
// move the jar file to a temp directory
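// (the VMID-based suffix gives each launch a unique directory name, so
// concurrent runs do not collide)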
String jarDir = "/tmp/hadoopjar" + new VMID().toString().replace(':', '_');
command = "mkdir " + jarDir + "; mv " + remoteFile + " " + jarDir;
channel = session.openChannel("exec");
((ChannelExec) channel).setCommand(command);
channel.connect();
channel.disconnect();
session.disconnect();
// we create a new session with a zero timeout to prevent the
// console stream from stalling -- eyhung
final Session session2 = server.createSessionNoTimeout();
// now remotely execute hadoop with the just sent-over jarfile
command = dir + "bin/hadoop jar " + jarDir + "/" + remoteFile;
log.fine("Running command: " + command);
execInConsole(session2, command, jarDir + "/" + remoteFile);