Package org.apache.hadoop.hive.ql.plan

Examples of org.apache.hadoop.hive.ql.plan.ExplainWork


      }
    } else if (fetchTask != null) {
      tasks.add(fetchTask);
    }
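    // Basic EXPLAIN: wrap the query's tasks in an ExplainWork (result file path,
    // string form of the explained statement's AST, and the EXTENDED flag) and
    // schedule the resulting task as a root task.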

    rootTasks.add(TaskFactory.get(new ExplainWork(ctx.getResFile().toString(),
        tasks, ((ASTNode) ast.getChild(0)).toStringTree(), extended), conf));
  }


    } else if (fetchTask != null) {
      tasks.add(fetchTask);
    }
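    // Same pattern with an additional 'formatted' flag passed to the ExplainWork
    // constructor.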

    rootTasks.add(
      TaskFactory.get(new ExplainWork(ctx.getResFile().toString(),
        tasks,
        ((ASTNode) ast.getChild(0)).toStringTree(),
        extended,
        formatted),
      conf));

    } else if (fetchTask != null) {
      tasks.add(fetchTask);
    }
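    // As above, but the explain task is kept in a local variable instead of being
    // added to rootTasks directly.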

    Task<? extends Serializable> explTask =
        TaskFactory.get(new ExplainWork(ctx.getResFile().toString(),
        tasks,
        ((ASTNode) ast.getChild(0)).toStringTree(),
        extended,
        formatted),
      conf);

    ParseContext pCtx = null;
    if (sem instanceof SemanticAnalyzer) {
      pCtx = ((SemanticAnalyzer)sem).getParseContext();
    }
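    // Richer constructor: the result file is passed as a Path together with the
    // ParseContext, fetch task, analyzer inputs and the dependency/logical flags;
    // the HIVEEXPLAINDEPENDENCYAPPENDTASKTYPES setting is then copied onto the work.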

    ExplainWork work = new ExplainWork(ctx.getResFile(),
        pCtx,
        tasks,
        fetchTask,
        input.dump(),
        sem.getInputs(),
        extended,
        formatted,
        dependency,
        logical);

    work.setAppendTaskType(
        HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVEEXPLAINDEPENDENCYAPPENDTASKTYPES));

    Task<? extends Serializable> explTask = TaskFactory.get(work, conf);

    fieldList = explTask.getResultSchema();

    ParseContext pCtx = null;
    if (sem instanceof SemanticAnalyzer) {
      pCtx = ((SemanticAnalyzer)sem).getParseContext();
    }
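    // As above, but the whole semantic analyzer is passed instead of just its
    // inputs, an 'authorize' flag is added, and the task is cast to ExplainTask.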

    ExplainWork work = new ExplainWork(ctx.getResFile(),
        pCtx,
        tasks,
        fetchTask,
        input.dump(),
        sem,
        extended,
        formatted,
        dependency,
        logical,
        authorize);

    work.setAppendTaskType(
        HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVEEXPLAINDEPENDENCYAPPENDTASKTYPES));

    ExplainTask explTask = (ExplainTask) TaskFactory.get(work, conf);

    fieldList = explTask.getResultSchema();

    FileSystem fs = FileSystem.get(conf);
    File f = File.createTempFile("TestSemanticAnalyzer", "explain");
    Path tmp = new Path(f.getPath());
    fs.create(tmp);
    fs.deleteOnExit(tmp);
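    // Test-style usage: build the ExplainWork directly and drive an ExplainTask by
    // hand via setWork / initialize / execute, writing the output to a temp file.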
    ExplainWork work = new ExplainWork(tmp, sem.getParseContext(), sem.getRootTasks(),
        sem.getFetchTask(), astStringTree, sem, true, false, false, false, false);
    ExplainTask task = new ExplainTask();
    task.setWork(work);
    task.initialize(conf, plan, null);
    task.execute(null);

    if (sem instanceof SemanticAnalyzer) {
      pCtx = ((SemanticAnalyzer)sem).getParseContext();
    }
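    // Variant that keeps the String result file but also passes the ParseContext
    // and the analyzer's inputs (the excerpt is truncated at the source).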

    Task<? extends Serializable> explTask =
        TaskFactory.get(new ExplainWork(ctx.getResFile().toString(),
        pCtx,
        tasks,
        ((ASTNode) ast.getChild(0)).toStringTree(),
        sem.getInputs(),
        extended,

    } else if (fetchTask != null) {
      tasks.add(fetchTask);
    }
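    // Variant without a ParseContext: tasks, AST string, analyzer inputs and the
    // extended/formatted flags (the excerpt is truncated at the source).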

    Task<? extends Serializable> explTask =
        TaskFactory.get(new ExplainWork(ctx.getResFile().toString(),
        tasks,
        ((ASTNode) ast.getChild(0)).toStringTree(),
        sem.getInputs(),
        extended,
        formatted,

      }
    }
    else if (fetchTask != null) {
      tasks.add(fetchTask);
    }

    // Older variant: the plan class appears as 'explainWork' and the result file is
    // taken straight from ctx.getResFile() without a toString() conversion.
    rootTasks.add(TaskFactory.get(new explainWork(ctx.getResFile(), tasks,
                                                  ((ASTNode) ast.getChild(0)).toStringTree(),
                                                  extended), this.conf));
  }
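
All of the snippets above follow the same basic pattern: build an ExplainWork that describes the compiled plan, then hand it to TaskFactory.get to obtain the task that writes the EXPLAIN output. The sketch below distils that pattern. It is not taken from any of the sources above: it assumes the four-argument ExplainWork(String, List, String, boolean) constructor used in the first snippets, and the class and method names (ExplainTaskSketch, buildExplainTask) are made up for illustration.

import java.io.Serializable;
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.plan.ExplainWork;

public class ExplainTaskSketch {

  // Hypothetical helper mirroring the pattern in the snippets above.
  public static Task<? extends Serializable> buildExplainTask(
      String resFile,                            // ctx.getResFile().toString() in the snippets
      List<Task<? extends Serializable>> tasks,  // root tasks (plus fetch task) of the query
      ASTNode explainAst,                        // AST of the EXPLAIN statement
      boolean extended,                          // EXPLAIN EXTENDED requested?
      HiveConf conf) {

    // Describe what to explain: where to write the output, the tasks that make up
    // the plan, the statement being explained, and the extended flag.
    ExplainWork work = new ExplainWork(
        resFile,
        tasks,
        ((ASTNode) explainAst.getChild(0)).toStringTree(),
        extended);

    // TaskFactory resolves the work class to its task class and returns the
    // explain task that will render the plan.
    return TaskFactory.get(work, conf);
  }
}

In the analyzers above the returned task is either added to rootTasks or, as in the test snippet, executed directly via initialize and execute.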