
Examples of org.apache.hadoop.hive.ql.plan.MapredLocalWork$BucketMapJoinContext

The fragments below show how Hive builds a MapredLocalWork, serializes it to plan.xml for a child JVM, archives hash table files for shipping, and localizes its temporary input paths.


      // fragment: register a new alias either in the main map-reduce plan
      // (keyed by input path) or, in the else-branch, in the map-local work
      plan.getPathToAliases().get(path).add(alias);
      plan.getPathToPartitionInfo().put(path, new PartitionDesc(tt_desc, null));
      plan.getAliasToWork().put(alias, topOp);
    } else {
      // populate local work if needed
      MapredLocalWork localPlan = plan.getMapLocalWork();
      if (localPlan == null) {
        localPlan = new MapredLocalWork(
            new LinkedHashMap<String, Operator<? extends Serializable>>(),
            new LinkedHashMap<String, FetchWork>());
      }

      // an alias must not be registered twice
      assert localPlan.getAliasToWork().get(alias) == null;
      assert localPlan.getAliasToFetchWork().get(alias) == null;
      localPlan.getAliasToWork().put(alias, topOp);
      localPlan.getAliasToFetchWork().put(alias, new FetchWork(alias, tt_desc));
      plan.setMapLocalWork(localPlan);
    }
  }
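Pulled together from the fragment above, a minimal sketch of registering one alias in a fresh MapredLocalWork; alias, topOp, tt_desc and plan are the same placeholders the fragment uses:

// Sketch (assumes only the constructor and accessors visible above).
MapredLocalWork localPlan = new MapredLocalWork(
    new LinkedHashMap<String, Operator<? extends Serializable>>(),
    new LinkedHashMap<String, FetchWork>());
localPlan.getAliasToWork().put(alias, topOp);                              // operator tree for the alias
localPlan.getAliasToFetchWork().put(alias, new FetchWork(alias, tt_desc)); // how to fetch its rows
plan.setMapLocalWork(localPlan);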


      String libJarsOption;

      // write out the plan to a local file so a child JVM can pick it up
      Path planPath = new Path(ctx.getLocalTmpFileURI(), "plan.xml");
      OutputStream out = FileSystem.getLocal(conf).create(planPath);
      MapredLocalWork plan = getWork();
      LOG.info("Generating plan file " + planPath.toString());
      Utilities.serializeMapRedLocalWork(plan, out);

      String isSilent = "true".equalsIgnoreCase(System.getProperty("test.silent")) ? "-nolog" : "";
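The serialize call above is paired with a deserialize on the child-JVM side (see the fragment further down). A minimal round-trip sketch, reusing ctx and conf from the fragment and assuming the Utilities signatures exactly as they appear on this page:

// Sketch: write the map-local plan to plan.xml, then read it back.
Path planPath = new Path(ctx.getLocalTmpFileURI(), "plan.xml");
FileSystem localFs = FileSystem.getLocal(conf);
OutputStream out = localFs.create(planPath);
Utilities.serializeMapRedLocalWork(getWork(), out); // assumed to close the stream itself

InputStream in = localFs.open(planPath);
MapredLocalWork restored = Utilities.deserializeMapRedLocalWork(in, conf);
in.close();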

    if (StringUtils.isNotBlank(addedArchives)) {
      initializeFiles("tmparchives", addedArchives);
    }

    try {
      MapredLocalWork localwork = work.getMapLocalWork();
      if (localwork != null) {
        boolean localMode = HiveConf.getVar(job, HiveConf.ConfVars.HADOOPJT).equals("local");
        if (!localMode) {
          Path localPath = new Path(localwork.getTmpFileURI());
          Path hdfsPath = new Path(work.getTmpHDFSFileURI());

          FileSystem hdfs = hdfsPath.getFileSystem(job);
          FileSystem localFS = localPath.getFileSystem(job);
          FileStatus[] hashtableFiles = localFS.listStatus(localPath);
          int fileNumber = hashtableFiles.length;
          String[] fileNames = new String[fileNumber];

          for (int i = 0; i < fileNumber; i++) {
            fileNames[i] = hashtableFiles[i].getPath().getName();
          }

          // package and compress all the hashtable files into one archive file
          String parentDir = localPath.toUri().getPath();
          String stageId = this.getId();
          String archiveFileURI = Utilities.generateTarURI(parentDir, stageId);
          String archiveFileName = Utilities.generateTarFileName(stageId);
          localwork.setStageID(stageId);

          FileUtils.tar(parentDir, fileNames, archiveFileName);
          Path archivePath = new Path(archiveFileURI);
          LOG.info("Archiving " + hashtableFiles.length + " hash table files to " + archiveFileURI);
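The fragment cuts off right after the tar step. A plausible continuation, assuming the archive is copied up to hdfsPath and registered with Hadoop's DistributedCache (standard Hadoop calls, not taken from this page):

// Hedged sketch: ship the archive to HDFS and expose it to all tasks.
Path hdfsFilePath = new Path(hdfsPath, archiveFileName); // hypothetical target location
hdfs.copyFromLocalFile(archivePath, hdfsFilePath);
DistributedCache.createSymlink(job);
DistributedCache.addCacheArchive(hdfsFilePath.toUri(), job);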

      throw new HiveException(e.getMessage(), e);
    }
    int ret;
    if (localtask) {
      // running as a standalone local task: deserialize the map-local plan
      // and execute it directly in this child JVM
      memoryMXBean = ManagementFactory.getMemoryMXBean();
      MapredLocalWork plan = Utilities.deserializeMapRedLocalWork(pathData, conf);
      MapredLocalTask ed = new MapredLocalTask(plan, conf, isSilent);
      ret = ed.executeFromChildJVM(new DriverContext());

    } else {
      MapredWork plan = Utilities.deserializeMapRedWork(pathData, conf);

    // localize any map-reduce input paths
    ctx.localizeKeys((Map<String, Object>) ((Object) work.getPathToAliases()));
    ctx.localizeKeys((Map<String, Object>) ((Object) work.getPathToPartitionInfo()));

    // localize any input paths for map-local work
    MapredLocalWork l = work.getMapLocalWork();
    if (l != null) {
      Map<String, FetchWork> m = l.getAliasToFetchWork();
      if (m != null) {
        for (FetchWork fw : m.values()) {
          String s = fw.getTblDir();
          if ((s != null) && ctx.isMRTmpFileURI(s)) {
            fw.setTblDir(ctx.localizeMRTmpFileURI(s));
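The fragment rewrites only the table directory. If FetchWork also carries partition directories (getPartDir()/setPartDir() in Hive of this vintage, an assumption here), the same treatment would look like:

// Hedged sketch: localize partition directories the same way as tblDir.
ArrayList<String> partDirs = fw.getPartDir();
if (partDirs != null) {
  ArrayList<String> localized = new ArrayList<String>();
  for (String dir : partDirs) {
    localized.add(ctx.isMRTmpFileURI(dir) ? ctx.localizeMRTmpFileURI(dir) : dir);
  }
  fw.setPartDir(localized);
}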


          dummyOp.close(abort);
        }
      }

      // close the forward operator for every fetch-side alias
      if (fetchOperators != null) {
        MapredLocalWork localWork = mo.getConf().getMapLocalWork();
        for (Map.Entry<String, FetchOperator> entry : fetchOperators.entrySet()) {
          Operator<? extends Serializable> forwardOp = localWork
              .getAliasToWork().get(entry.getKey());
          forwardOp.close(abort);
        }
      }
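The close loop above mirrors the processing loop that runs before it: every map-local alias is drained from its FetchOperator into the matching forward operator. A sketch of that loop, assuming FetchOperator.getNextRow() returns an InspectableObject whose o field holds the row:

// Hedged sketch: forward all rows of each map-local alias.
for (Map.Entry<String, FetchOperator> entry : fetchOperators.entrySet()) {
  Operator<? extends Serializable> forwardOp = localWork
      .getAliasToWork().get(entry.getKey());
  FetchOperator fetchOp = entry.getValue();
  InspectableObject row;
  while ((row = fetchOp.getNextRow()) != null) {
    forwardOp.process(row.o, 0); // single parent, so tag 0
  }
}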

