Package org.apache.hadoop.hive.ql.plan

Examples of org.apache.hadoop.hive.ql.plan.loadFileDesc
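
A loadFileDesc describes one "move files" step in a Hive query plan: a source directory (typically the query's temporary output directory), a target directory, a flag saying whether the target is a DFS or a local directory, and optional column names and types. The lower-case class names in the snippets below come from an older Hive release. As a rough sketch of the API the examples exercise (constructor and getter names are taken from the excerpts; the paths and variable names are made up for illustration):

  // Sketch only: the types of the column-name/column-type arguments are not
  // visible in the excerpts, so nulls are passed, as in the merge-task example.
  loadFileDesc lfd = new loadFileDesc(
      "/tmp/hive-scratch/query-output",   // source: temporary query output (example path)
      "/user/hive/warehouse/dest_dir",    // target directory (example path)
      true,                               // target is a DFS directory
      null, null);                        // column names / column types

  // Consumers such as the move task read the descriptor back through getters:
  String src    = lfd.getSourceDir();
  String dst    = lfd.getTargetDir();
  boolean toDfs = lfd.getIsDfsDir();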


  // Decides whether this move operates on the local file system: load-table
  // work never does, and load-file work is local only when its target is not
  // a DFS directory.
  public boolean isLocal() {
    loadTableDesc tbd = work.getLoadTableWork();
    if (tbd != null) {
      return false;
    }

    loadFileDesc lfd = work.getLoadFileWork();
    if (lfd != null) {
      // local unless the descriptor points at a DFS directory
      return !lfd.getIsDfsDir();
    }


    // Point the conditional merge plan at the file sink's output directory.
    cplan.getPathToAliases().put(fsConf.getDirName(), aliases);
    cplan.getAliasToWork().put(fsConf.getDirName(), ts_op);
    cplan.getPathToPartitionInfo().put(fsConf.getDirName(), new partitionDesc(fsConf.getTableInfo(), null));
    cplan.setNumReduceTasks(-1);

    // Dummy move work: a loadFileDesc that moves the file sink's output
    // directory straight to finalName on DFS; it is the alternative to the
    // merge job inside the conditional work built below.
    moveWork dummyMv = new moveWork(null, null, null, new loadFileDesc(fsOp.getConf().getDirName(), finalName, true, null, null), false);
    Task<? extends Serializable> dummyMergeTask = TaskFactory.get(dummyMv, ctx.getConf());
    List<Serializable> listWorks = new ArrayList<Serializable>();
    listWorks.add(dummyMv);
    listWorks.add(mergeTask.getWork());
    ConditionalWork cndWork = new ConditionalWork(listWorks);

          currentTableId = this.destTableId;
          this.destTableId++;
        }

        // The destination is a directory: record a loadFileDesc that will move
        // the query's temporary output (queryTmpdir) to the destination path.
        boolean isDfsDir = (dest_type.intValue() == QBMetaData.DEST_DFS_FILE);
        this.loadFileWork.add(new loadFileDesc(queryTmpdir, destStr,
                                               isDfsDir, cols, colTypes));

        if (tblDesc == null) {
          table_desc = PlanUtils.getDefaultTableDesc(Integer.toString(Utilities.ctrlaCode),
                                                     cols, colTypes, false);

          currentTableId = this.destTableId;
          this.destTableId++;
        }

        // Same pattern for a file destination: record the loadFileDesc, build a
        // default (ctrl-A delimited) table descriptor for the output columns,
        // and register the destination directory as a write entity.
        boolean isDfsDir = (dest_type.intValue() == QBMetaData.DEST_DFS_FILE);
        this.loadFileWork.add(new loadFileDesc(queryTmpdir, destStr,
                                               isDfsDir, cols, colTypes));

        table_desc = PlanUtils.getDefaultTableDesc(Integer.toString(Utilities.ctrlaCode),
                                                   cols, colTypes, false);
        outputs.add(new WriteEntity(destStr, !isDfsDir));
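
The loadFileDesc entries accumulated in loadFileWork are later wrapped in move work and handed to the task factory. A minimal sketch of that hand-off, restricted to the constructors and factory call that already appear in the excerpts on this page (the five-argument moveWork constructor and TaskFactory.get); the variable names and the configuration reference are placeholders:

  // Hypothetical wiring, mirroring the dummyMv example above; the meaning of the
  // null/false arguments is not shown in the excerpts and is simply copied from it.
  loadFileDesc lfd = new loadFileDesc(queryTmpdir, destStr, isDfsDir, cols, colTypes);
  moveWork mvWork = new moveWork(null, null, null, lfd, false);
  Task<? extends Serializable> mvTask = TaskFactory.get(mvWork, conf);  // conf: a HiveConf, e.g. ctx.getConf()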

    // A variant of the merge-plan setup above, apparently from a different Hive
    // revision: the moveWork constructor takes three arguments here, but the
    // loadFileDesc is built exactly the same way.
    cplan.getPathToAliases().put(fsConf.getDirName(), aliases);
    cplan.getAliasToWork().put(fsConf.getDirName(), ts_op);
    cplan.getPathToPartitionInfo().put(fsConf.getDirName(), new partitionDesc(fsConf.getTableInfo(), null));
    cplan.setNumReduceTasks(-1);

    moveWork dummyMv = new moveWork(null, new loadFileDesc(fsOp.getConf().getDirName(), finalName, true, null, null), false);
    Task<? extends Serializable> dummyMergeTask = TaskFactory.get(dummyMv, ctx.getConf());
    List<Serializable> listWorks = new ArrayList<Serializable>();
    listWorks.add(dummyMv);
    listWorks.add(mergeTask.getWork());
    ConditionalWork cndWork = new ConditionalWork(listWorks);

  public int execute() {

    try {
      // Do any hive related operations like moving tables and files
      // to appropriate locations
      loadFileDesc lfd = work.getLoadFileWork();
      if (lfd != null) {
        Path targetPath = new Path(lfd.getTargetDir());
        Path sourcePath = new Path(lfd.getSourceDir());
        FileSystem fs = sourcePath.getFileSystem(conf);
        if (lfd.getIsDfsDir()) {
          // Just do a rename on the URIs, they belong to the same FS
          String mesg = "Moving data to: " + lfd.getTargetDir();
          String mesg_detail = " from " +  lfd.getSourceDir();
          console.printInfo(mesg, mesg_detail);

          // delete the output directory if it already exists
          fs.delete(targetPath, true);
          // if the source exists, rename it; otherwise create an empty directory
          if (fs.exists(sourcePath)) {
            if (!fs.rename(sourcePath, targetPath)) {
              throw new HiveException("Unable to rename: " + sourcePath + " to: "
                                      + targetPath);
            }
          } else if (!fs.mkdirs(targetPath)) {
            throw new HiveException("Unable to make directory: " + targetPath);
          }
        } else {
          // This is a local file
          String mesg = "Copying data to local directory " + lfd.getTargetDir();
          String mesg_detail =  " from " + lfd.getSourceDir();
          console.printInfo(mesg, mesg_detail);

          // delete the existing dest directory
          LocalFileSystem dstFs = FileSystem.getLocal(conf);
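
The excerpt stops right after the local file system is obtained. One plausible continuation of the local branch, written here only with standard Hadoop FileSystem calls and not taken from the original method:

          // Illustration only (not the original source): clear any existing local
          // target, then copy the data down from the source file system.
          if (dstFs.exists(targetPath)) {
            dstFs.delete(targetPath, true);
          }
          if (fs.exists(sourcePath)) {
            fs.copyToLocalFile(sourcePath, targetPath);
          } else if (!dstFs.mkdirs(targetPath)) {
            throw new HiveException("Unable to make local directory: " + targetPath);
          }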
