Package org.apache.hadoop.hive.ql.exec

Examples of org.apache.hadoop.hive.ql.exec.ExecDriver$TaskInfo


      System.out.println("VMotioned!");
    }
    else
    {
      System.out.println("VMotion failed!");
      TaskInfo info = task.getTaskInfo();
      System.out.println(info.getError().getFault());
    }
    si.getServerConnection().logout();
  }
View Full Code Here
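A minimal sketch, not taken from any of the listed sources, of polling a task's TaskInfo until it finishes. It assumes the vijava API used in the snippet above (com.vmware.vim25.mo.Task, com.vmware.vim25.TaskInfo); the class name TaskPoller and the method pollUntilDone are illustrative only:

import com.vmware.vim25.TaskInfo;
import com.vmware.vim25.TaskInfoState;
import com.vmware.vim25.mo.Task;

public final class TaskPoller {
  // Re-read the task's "info" property until it leaves the queued/running states.
  static boolean pollUntilDone(Task task) throws Exception {
    while (true) {
      TaskInfo info = task.getTaskInfo();
      TaskInfoState state = info.getState();
      if (state == TaskInfoState.success) {
        return true;
      }
      if (state == TaskInfoState.error) {
        // As in the snippet above, the fault carries the reason the task failed.
        System.out.println(info.getError().getFault());
        return false;
      }
      Thread.sleep(1000); // still queued or running
    }
  }
}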


    {        
      return SUCCESS;
    }
    else
    {
      TaskInfo tinfo = (TaskInfo) getCurrentProperty(PROPNAME_INFO);        
      LocalizedMethodFault fault = tinfo.getError();
      String error = "Error Occurred";
      if (fault != null)
      {
        MethodFault mf = fault.getFault();
        throw mf;
View Full Code Here

            // wait if there is already a VM snapshot task running
            ManagedObjectReference taskmgr = context.getServiceContent().getTaskManager();
            List<ManagedObjectReference> tasks = (ArrayList<ManagedObjectReference>)context.getVimClient().getDynamicProperty(taskmgr, "recentTask");

            for (ManagedObjectReference taskMor : tasks) {
                TaskInfo info = (TaskInfo)(context.getVimClient().getDynamicProperty(taskMor, "info"));

                if (info.getEntityName().equals(cmd.getVmName()) && info.getName().equalsIgnoreCase("CreateSnapshot_Task")) {
                    s_logger.debug("There is already a VM snapshot task running, wait for it");
                    context.getVimClient().waitForTask(taskMor);
                }
            }
View Full Code Here

            // wait if there is already a VM revert task running
            ManagedObjectReference taskmgr = context.getServiceContent().getTaskManager();
            List<ManagedObjectReference> tasks = (ArrayList<ManagedObjectReference>)context.getVimClient().getDynamicProperty(taskmgr, "recentTask");

            for (ManagedObjectReference taskMor : tasks) {
                TaskInfo info = (TaskInfo)(context.getVimClient().getDynamicProperty(taskMor, "info"));
                if (info.getEntityName().equals(cmd.getVmName()) && info.getName().equalsIgnoreCase("RevertToSnapshot_Task")) {
                    s_logger.debug("There is already a VM snapshot task running, wait for it");
                    context.getVimClient().waitForTask(taskMor);
                }
            }
View Full Code Here
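The two snippets above repeat the same loop and differ only in the vCenter task name they wait for. A hedged refactoring sketch follows; the helper name waitForPendingVmTask is hypothetical, and it is assumed to sit in the same class as the snippets so that VmwareContext and s_logger are in scope:

    private void waitForPendingVmTask(VmwareContext context, String vmName, String taskName) throws Exception {
        // Scan the task manager's recent tasks for an in-flight task of the given type on this VM.
        ManagedObjectReference taskmgr = context.getServiceContent().getTaskManager();
        List<ManagedObjectReference> tasks = (List<ManagedObjectReference>) context.getVimClient().getDynamicProperty(taskmgr, "recentTask");

        for (ManagedObjectReference taskMor : tasks) {
            TaskInfo info = (TaskInfo) context.getVimClient().getDynamicProperty(taskMor, "info");
            // Match on the target VM and the task name (e.g. "CreateSnapshot_Task" or "RevertToSnapshot_Task").
            if (vmName.equals(info.getEntityName()) && taskName.equalsIgnoreCase(info.getName())) {
                s_logger.debug("There is already a " + taskName + " running on " + vmName + ", wait for it");
                context.getVimClient().waitForTask(taskMor);
            }
        }
    }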

      stage.setStageType(task.getType());
      query.addToStageList(stage);
     
      if (task instanceof ExecDriver) {
        // populate map task
        ExecDriver mrTask = (ExecDriver)task;
        org.apache.hadoop.hive.ql.plan.api.Task mapTask = new org.apache.hadoop.hive.ql.plan.api.Task();
        mapTask.setTaskId(stage.getStageId() + "_MAP");
        mapTask.setTaskType(TaskType.MAP);
        stage.addToTaskList(mapTask);
        populateOperatorGraph(mapTask, mrTask.getWork().getAliasToWork().values());
       
        // populate reduce task
        if (mrTask.hasReduce()) {
          org.apache.hadoop.hive.ql.plan.api.Task reduceTask = new org.apache.hadoop.hive.ql.plan.api.Task();
          reduceTask.setTaskId(stage.getStageId() + "_REDUCE");
          reduceTask.setTaskType(TaskType.REDUCE);
          stage.addToTaskList(reduceTask);
          Collection<Operator<? extends Serializable>> reducerTopOps = new ArrayList<Operator<? extends Serializable>>();
          reducerTopOps.add(mrTask.getWork().getReducer());
          populateOperatorGraph(reduceTask, reducerTopOps);
        }
      }
      else {
        org.apache.hadoop.hive.ql.plan.api.Task otherTask = new org.apache.hadoop.hive.ql.plan.api.Task();
View Full Code Here

      }
      if (task.done()) {
        done.add(task.getId());
      }
      if (task instanceof ExecDriver) {
        ExecDriver mrTask = (ExecDriver)task;
        extractOperatorCounters(mrTask.getWork().getAliasToWork().values(), task.getId() + "_MAP");
        if (mrTask.mapStarted()) {
          started.add(task.getId() + "_MAP");
        }
        if (mrTask.mapDone()) {
          done.add(task.getId() + "_MAP");
        }
        if (mrTask.hasReduce()) {
          Collection<Operator<? extends Serializable>> reducerTopOps = new ArrayList<Operator<? extends Serializable>>();
          reducerTopOps.add(mrTask.getWork().getReducer());
          extractOperatorCounters(reducerTopOps, task.getId() + "_REDUCE");
          if (mrTask.reduceStarted()) {
            started.add(task.getId() + "_REDUCE");
          }
          if (mrTask.reduceDone()) {
            done.add(task.getId() + "_REDUCE");
          }
        }
      }
      else if (task instanceof ConditionalTask) {
View Full Code Here

      stage.setStageType(task.getType());
      query.addToStageList(stage);

      if (task instanceof ExecDriver) {
        // populate map task
        ExecDriver mrTask = (ExecDriver) task;
        org.apache.hadoop.hive.ql.plan.api.Task mapTask =
          new org.apache.hadoop.hive.ql.plan.api.Task();
        mapTask.setTaskId(stage.getStageId() + "_MAP");
        mapTask.setTaskType(TaskType.MAP);
        stage.addToTaskList(mapTask);
        populateOperatorGraph(mapTask, mrTask.getWork().getAliasToWork()
            .values());

        // populate reduce task
        if (mrTask.hasReduce()) {
          org.apache.hadoop.hive.ql.plan.api.Task reduceTask =
            new org.apache.hadoop.hive.ql.plan.api.Task();
          reduceTask.setTaskId(stage.getStageId() + "_REDUCE");
          reduceTask.setTaskType(TaskType.REDUCE);
          stage.addToTaskList(reduceTask);
          Collection<Operator<? extends OperatorDesc>> reducerTopOps =
            new ArrayList<Operator<? extends OperatorDesc>>();
          reducerTopOps.add(mrTask.getWork().getReducer());
          populateOperatorGraph(reduceTask, reducerTopOps);
        }
      } else {
        org.apache.hadoop.hive.ql.plan.api.Task otherTask =
          new org.apache.hadoop.hive.ql.plan.api.Task();
View Full Code Here

      }
      if (task.done()) {
        done.add(task.getId());
      }
      if (task instanceof ExecDriver) {
        ExecDriver mrTask = (ExecDriver) task;
        extractOperatorCounters(mrTask.getWork().getAliasToWork().values(),
            task.getId() + "_MAP");
        if (mrTask.mapStarted()) {
          started.add(task.getId() + "_MAP");
        }
        if (mrTask.mapDone()) {
          done.add(task.getId() + "_MAP");
        }
        if (mrTask.hasReduce()) {
          Collection<Operator<? extends OperatorDesc>> reducerTopOps =
            new ArrayList<Operator<? extends OperatorDesc>>();
          reducerTopOps.add(mrTask.getWork().getReducer());
          extractOperatorCounters(reducerTopOps, task.getId() + "_REDUCE");
          if (mrTask.reduceStarted()) {
            started.add(task.getId() + "_REDUCE");
          }
          if (mrTask.reduceDone()) {
            done.add(task.getId() + "_REDUCE");
          }
        }
      } else if (task instanceof ConditionalTask) {
        ConditionalTask cTask = (ConditionalTask) task;
View Full Code Here

      }
      if (task.done()) {
        done.add(task.getId());
      }
      if (task instanceof ExecDriver) {
        ExecDriver mrTask = (ExecDriver) task;
        extractOperatorCounters(mrTask.getWork().getAliasToWork().values(),
            task.getId() + "_MAP");
        if (mrTask.mapStarted()) {
          started.add(task.getId() + "_MAP");
        }
        if (mrTask.mapDone()) {
          done.add(task.getId() + "_MAP");
        }
        if (mrTask.hasReduce()) {
          Collection<Operator<? extends Serializable>> reducerTopOps =
            new ArrayList<Operator<? extends Serializable>>();
          reducerTopOps.add(mrTask.getWork().getReducer());
          extractOperatorCounters(reducerTopOps, task.getId() + "_REDUCE");
          if (mrTask.reduceStarted()) {
            started.add(task.getId() + "_REDUCE");
          }
          if (mrTask.reduceDone()) {
            done.add(task.getId() + "_REDUCE");
          }
        }
      } else if (task instanceof ConditionalTask) {
        ConditionalTask cTask = (ConditionalTask) task;
View Full Code Here

      stage.setStageType(task.getType());
      query.addToStageList(stage);

      if (task instanceof ExecDriver) {
        // populate map task
        ExecDriver mrTask = (ExecDriver) task;
        org.apache.hadoop.hive.ql.plan.api.Task mapTask =
          new org.apache.hadoop.hive.ql.plan.api.Task();
        mapTask.setTaskId(stage.getStageId() + "_MAP");
        mapTask.setTaskType(TaskType.MAP);
        stage.addToTaskList(mapTask);
        populateOperatorGraph(mapTask, mrTask.getWork().getAliasToWork()
            .values());

        // populate reduce task
        if (mrTask.hasReduce()) {
          org.apache.hadoop.hive.ql.plan.api.Task reduceTask =
            new org.apache.hadoop.hive.ql.plan.api.Task();
          reduceTask.setTaskId(stage.getStageId() + "_REDUCE");
          reduceTask.setTaskType(TaskType.REDUCE);
          stage.addToTaskList(reduceTask);
          Collection<Operator<? extends Serializable>> reducerTopOps =
            new ArrayList<Operator<? extends Serializable>>();
          reducerTopOps.add(mrTask.getWork().getReducer());
          populateOperatorGraph(reduceTask, reducerTopOps);
        }
      } else {
        org.apache.hadoop.hive.ql.plan.api.Task otherTask =
          new org.apache.hadoop.hive.ql.plan.api.Task();
View Full Code Here
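All of the Hive snippets above rely on one naming convention: an ExecDriver (MapReduce) stage is reported as a synthetic task "<stageId>_MAP" and, when the plan has a reducer, an additional "<stageId>_REDUCE", and the counter/progress extraction uses the same suffixes so that its started/done ids line up with the stage graph. A minimal sketch of that convention; the class StageTaskIds and the method syntheticTaskIds are hypothetical, not part of Hive:

import java.util.ArrayList;
import java.util.List;

final class StageTaskIds {
  // Mirror the id scheme used by the stage-population and counter-extraction snippets above.
  static List<String> syntheticTaskIds(String stageId, boolean isExecDriver, boolean hasReduce) {
    List<String> ids = new ArrayList<String>();
    if (!isExecDriver) {
      ids.add(stageId);              // non-MapReduce tasks keep their plain id
      return ids;
    }
    ids.add(stageId + "_MAP");       // every ExecDriver stage gets a map task
    if (hasReduce) {
      ids.add(stageId + "_REDUCE");  // and a reduce task only when a reducer is set
    }
    return ids;
  }
}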
