if (StringUtils.isNotBlank(addedArchives)) {
  initializeFiles("tmparchives", addedArchives);
}

try {
  MapredLocalWork localwork = work.getMapLocalWork();
  if (localwork != null) {
    boolean localMode = HiveConf.getVar(job, HiveConf.ConfVars.HADOOPJT).equals("local");
    if (!localMode) {
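      // Running against a real cluster (not local mode): the hash table files
      // produced by the map-join local work live on the local file system and
      // must be packaged so they can be copied up to HDFS for the map tasks.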
      Path localPath = new Path(localwork.getTmpFileURI());
      Path hdfsPath = new Path(work.getTmpHDFSFileURI());
      FileSystem hdfs = hdfsPath.getFileSystem(job);
      FileSystem localFS = localPath.getFileSystem(job);
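
      // Collect the names of all hash table files sitting in the local
      // staging directory.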
      FileStatus[] hashtableFiles = localFS.listStatus(localPath);
      int fileNumber = hashtableFiles.length;
      String[] fileNames = new String[fileNumber];
      for (int i = 0; i < fileNumber; i++) {
        fileNames[i] = hashtableFiles[i].getPath().getName();
      }

      // Package and compress all the hash table files into a single archive,
      // named after this stage's id.
      String parentDir = localPath.toUri().getPath();
      String stageId = this.getId();
      String archiveFileURI = Utilities.generateTarURI(parentDir, stageId);
      String archiveFileName = Utilities.generateTarFileName(stageId);
      localwork.setStageID(stageId);

      FileUtils.tar(parentDir, fileNames, archiveFileName);
      Path archivePath = new Path(archiveFileURI);
      LOG.info("Archiving " + fileNumber + " hash table files to " + archiveFileURI);