Examples of DriverContext
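
The snippets below show the recurring way org.apache.hadoop.hive.ql.DriverContext is used in Hive: a fresh DriverContext is created, handed to a task's initialize(...) call together with the configuration and, where available, the query plan, and then passed to execute(...), whose integer return code indicates success or failure.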


Examples of org.apache.hadoop.hive.ql.DriverContext

    int ret;
    // run the deserialized plan either as a MapredLocalTask inside this child JVM
    // or through ExecDriver, passing a fresh DriverContext in both cases
    if (localtask) {
      memoryMXBean = ManagementFactory.getMemoryMXBean();
      MapredLocalWork plan = Utilities.deserializeMapRedLocalWork(pathData, conf);
      MapredLocalTask ed = new MapredLocalTask(plan, conf, isSilent);
      ret = ed.executeFromChildJVM(new DriverContext());

    } else {
      MapredWork plan = Utilities.deserializeMapRedWork(pathData, conf);
      ExecDriver ed = new ExecDriver(plan, conf, isSilent);
      ret = ed.execute(new DriverContext());
    }

    if (ret != 0) {
      System.exit(2);
    }

Examples of org.apache.hadoop.hive.ql.DriverContext

    // log the list of job conf parameters for reference
    LOG.info(sb.toString());

    MergeWork mergeWork = new MergeWork(inputPaths, outputDir);
    DriverContext driverCxt = new DriverContext();
    BlockMergeTask taskExec = new BlockMergeTask();
    taskExec.initialize(hiveConf, null, driverCxt);
    taskExec.setWork(mergeWork);
    int ret = taskExec.execute(driverCxt);
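
The pattern above recurs across the remaining examples. The fragment below is a minimal sketch of that lifecycle, assuming the Hive classes are on the classpath and that inputPaths, outputDir, and hiveConf are already populated; it mirrors the BlockMergeTask example above and adds only the return-code check seen in the first snippet.

    // minimal sketch: build the work, create a DriverContext,
    // initialize the task, attach the work, then execute it
    MergeWork mergeWork = new MergeWork(inputPaths, outputDir);
    DriverContext driverCxt = new DriverContext();
    BlockMergeTask taskExec = new BlockMergeTask();
    taskExec.initialize(hiveConf, null, driverCxt);   // null: no QueryPlan in this sketch
    taskExec.setWork(mergeWork);
    int ret = taskExec.execute(driverCxt);
    if (ret != 0) {
      // non-zero return code: the merge task failed
    }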

Examples of org.apache.hadoop.hive.ql.DriverContext

  private int mergeFiles(Hive db, AlterTablePartMergeFilesDesc mergeFilesDesc)
      throws HiveException {
    // merge work only needs input and output.
    MergeWork mergeWork = new MergeWork(mergeFilesDesc.getInputDir(),
        mergeFilesDesc.getOutputDir());
    DriverContext driverCxt = new DriverContext();
    BlockMergeTask taskExec = new BlockMergeTask();
    taskExec.initialize(db.getConf(), null, driverCxt);
    taskExec.setWork(mergeWork);
    taskExec.setQueryPlan(this.getQueryPlan());
    int ret = taskExec.execute(driverCxt);

Examples of org.apache.hadoop.hive.ql.DriverContext


  private void executePlan() throws Exception {
    String testName = new Exception().getStackTrace()[1].getMethodName();
    MapRedTask mrtask = new MapRedTask();
    DriverContext dctx = new DriverContext();
    mrtask.setWork(mr);
    mrtask.initialize(conf, null, dctx);
    int exitVal = mrtask.execute(dctx);

    if (exitVal != 0) {

Examples of org.apache.hadoop.hive.ql.DriverContext

  /**
   * Builds a DriverContext, initializes a MapRedTask for the given MapredWork, and executes it.
   *
   * @throws Exception
   */
  private void executePlan(MapredWork mr, HiveConf hiveConf) throws Exception
  {
    MapRedTask mrtask = new MapRedTask();
    DriverContext dctx = new DriverContext();
    mrtask.setWork(mr);
    mrtask.initialize(hiveConf, null, dctx);
    int exitVal = mrtask.execute(dctx);

    if (exitVal != 0)

Examples of org.apache.hadoop.hive.ql.DriverContext

    int ret;
    if (localtask) {
      memoryMXBean = ManagementFactory.getMemoryMXBean();
      MapredLocalWork plan = Utilities.deserializePlan(pathData, MapredLocalWork.class, conf);
      MapredLocalTask ed = new MapredLocalTask(plan, conf, isSilent);
      ret = ed.executeFromChildJVM(new DriverContext());

    } else {
      MapredWork plan = Utilities.deserializePlan(pathData, MapredWork.class, conf);
      ExecDriver ed = new ExecDriver(plan, conf, isSilent);
      ret = ed.execute(new DriverContext());
    }

    if (ret != 0) {
      System.exit(ret);
    }
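
Compared with the first snippet on this page, this later variant uses the typed Utilities.deserializePlan(path, Class, conf) overload in place of deserializeMapRedWork/deserializeMapRedLocalWork, and it exits with the task's actual return code rather than a fixed status of 2.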

Examples of org.apache.hadoop.hive.ql.DriverContext

    MergeWork mergeWork = new MergeWork(mergeFilesDesc.getInputDir(),
        mergeFilesDesc.getOutputDir());
    mergeWork.setListBucketingCtx(mergeFilesDesc.getLbCtx());
    mergeWork.resolveConcatenateMerge(db.getConf());
    mergeWork.setMapperCannotSpanPartns(true);
    DriverContext driverCxt = new DriverContext();
    BlockMergeTask taskExec = new BlockMergeTask();
    taskExec.initialize(db.getConf(), null, driverCxt);
    taskExec.setWork(mergeWork);
    taskExec.setQueryPlan(this.getQueryPlan());
    int ret = taskExec.execute(driverCxt);

Examples of org.apache.hadoop.hive.ql.DriverContext

      ColumnTruncateWork truncateWork = new ColumnTruncateWork(
          truncateTableDesc.getColumnIndexes(), truncateTableDesc.getInputDir(),
          truncateTableDesc.getOutputDir());
      truncateWork.setListBucketingCtx(truncateTableDesc.getLbCtx());
      truncateWork.setMapperCannotSpanPartns(true);
      DriverContext driverCxt = new DriverContext();
      ColumnTruncateTask taskExec = new ColumnTruncateTask();
      taskExec.initialize(db.getConf(), null, driverCxt);
      taskExec.setWork(truncateWork);
      taskExec.setQueryPlan(this.getQueryPlan());
      return taskExec.execute(driverCxt);

Examples of org.apache.hadoop.hive.ql.DriverContext

    // stats work
    statsWork.setPartialScanAnalyzeCommand(true);

    // partial scan task
    DriverContext driverCxt = new DriverContext();
    Task<PartialScanWork> psTask = TaskFactory.get(scanWork, parseCtx.getConf());
    psTask.initialize(parseCtx.getConf(), null, driverCxt);
    psTask.setWork(scanWork);

    // task dependency