Package com.taobao.zeus.jobs

Examples of com.taobao.zeus.jobs.JobContext
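
The snippets below show the two ways a JobContext is obtained in the Zeus code base: JobContext.getTempJobContext() (or a plain new JobContext()) for one-off jobs such as previews, file uploads, heartbeat checks and cancelling a running Hadoop job, and new JobContext(runType) with MANUAL_RUN, DEBUG_RUN or SCHEDULE_RUN for jobs created by the worker. In each case the context carries the job's configuration (HierarchyProperties), its JobHistory, an optional work directory, and arbitrary key/value data (putData/getData) before it is handed to a concrete Job whose run() returns an exit code (0 meaning success). A minimal sketch of that recurring pattern, assuming the Zeus classes are on the classpath (the HierarchyProperties import path below is guessed and may need adjusting):

  import java.util.HashMap;
  import java.util.Map;

  import com.taobao.zeus.jobs.JobContext;
  // assumed package for HierarchyProperties; adjust to where it lives in the code base
  import com.taobao.zeus.util.HierarchyProperties;

  public class JobContextSketch {
    public static void main(String[] args) {
      // temporary context for a one-off run
      JobContext context = JobContext.getTempJobContext();

      // configuration travels with the context as HierarchyProperties wrapped around a Map
      Map<String, String> conf = new HashMap<String, String>();
      conf.put("hadoop.ugi.name", "uginame");
      context.setProperties(new HierarchyProperties(conf));

      // arbitrary key/value data can be attached and read back by the job
      context.putData("depth", 1);

      // hand the context to a concrete Job (MapReduceJob in the first snippet below)
      // and check the exit code it returns:
      //   Integer exitCode = new MapReduceJob(context).run();   // 0 means success
    }
  }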


    }
    return super.run();
  }

  public static void main(String[] args) {
    JobContext context=JobContext.getTempJobContext();
    Map<String, String> map=new HashMap<String, String>();
    map.put("hadoop.ugi.name", "uginame");
    HierarchyProperties properties=new HierarchyProperties(map);
    context.setProperties(properties);
   
    new MapReduceJob(context);
  }
View Full Code Here


  public TablePreviewModel getPreviewData(PartitionModel model)
      throws GwtException {
    try {
      TablePreviewModel result;
      String path = model.getPath();
      JobContext jobContext = JobContext.getTempJobContext();
      jobContext.setProperties(new HierarchyProperties(
          new HashMap<String, String>()));
      jobContext.getProperties().setProperty("preview.hdfs.path", path);
      jobContext.getProperties().setProperty("preview.hdfs.inputFormat",
          model.getInputFormat());
      jobContext.getProperties().setProperty("preview.hdfs.isCompressed",
          String.valueOf(model.isCompressed()));
      Profile profile = profileManager.findByUid(LoginUser.getUser()
          .getUid());
      if (profile != null) {
        String ugi = profile.getHadoopConf().get(
            "hadoop.hadoop.job.ugi");
        jobContext.getProperties().setProperty(
            "preview.hadoop.job.ugi", ugi);
      }
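      // run the preview job against the context configured above; its log is read back from the JobHistory afterwards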
      DataPreviewJob job = new DataPreviewJob(jobContext);
      job.run();
      String logContent = job.getJobContext().getJobHistory().getLog()
View Full Code Here

            }
            File direcotry=new File(workDir+File.separator+"job-processer-"+jobProcesser.getJobId());
            if(!direcotry.exists()){
              direcotry.mkdirs();
            }
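            // each nested JobProcesser unit gets its own sub-context, recording the recursion depth so it can be capped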
            JobContext sub=new JobContext(jobContext.getRunType());
            sub.putData("depth", ++depth);
            Job job=createJob(sub,jb, history, direcotry.getAbsolutePath(), applicationContext);
            jobs.add(job);
          }
        }else{
          jobContext.getJobHistory().getLog().appendZeus("递归的JobProcesser处理单元深度过大,停止递归");
          // log message: "nested JobProcesser units are too deep; stop recursing"
View Full Code Here

        if(jobStatus.getHistoryId()!=null){
          JobHistory history=jobHistoryManager.findJobHistory(jobStatus.getHistoryId());
          operator=history.getOperator();
          if(history.getStatus()==Status.RUNNING){
            try {
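              // a throwaway context carrying only the JobHistory is enough for CancelHadoopJob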
              JobContext temp=JobContext.getTempJobContext();
              temp.setJobHistory(history);
              new CancelHadoopJob(temp).run();
            } catch (Exception e) {
              // ignore a failed cancel attempt
            }
          }
View Full Code Here

import com.taobao.zeus.socket.worker.WorkerContext;

public class WorkerHeartBeat {

  public ChannelFuture execute(WorkerContext context){
    JobContext jobContext=JobContext.getTempJobContext();
    MemUseRateJob job=new MemUseRateJob(jobContext, 1);
    try {
      int exitCode = -1;
      int count = 0;
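      // retry the memory-usage check up to three times until it exits cleanly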
      while(count<3 && exitCode!=0){
        count++;
        exitCode=job.run();
      }
      if(exitCode!=0) {
        ScheduleInfoLog.error("HeartBeat Shell Error",new Exception(jobContext.getJobHistory().getLog().getContent()));
        // prevent an NPE below: getData("mem") would otherwise return null
        jobContext.putData("mem", 1.0);
      }
    } catch (Exception e) {
      ScheduleInfoLog.error("memratejob", e);
    }
    HeartBeatMessage hbm=HeartBeatMessage.newBuilder().setMemRate(((Double)jobContext.getData("mem")).floatValue())
      .addAllDebugRunnings(context.getDebugRunnings().keySet())
      .addAllManualRunnings(context.getManualRunnings().keySet())
      .addAllRunnings(context.getRunnings().keySet())
      .setTimestamp(new Date().getTime()).build();
    Request req=Request.newBuilder().setRid(AtomicIncrease.getAndIncrement()).setOperate(Operate.HeartBeat).setBody(hbm.toByteString()).build();
View Full Code Here

              direcotry.mkdirs();
            }
            JobBean jb = context.getGroupManager()
                .getUpstreamJobBean(history.getJobId());

            final Job job = JobUtils.createJob(new JobContext(JobContext.MANUAL_RUN),
                jb, history, direcotry.getAbsolutePath(),
                context.getApplicationContext());
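            // register the manual run so the worker reports it in its heartbeats (see WorkerHeartBeat above)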
            context.getManualRunnings().put(historyId, job);

            Integer exitCode = -1;
View Full Code Here

                + "debug-" + history.getId());
            if (!direcotry.exists()) {
              direcotry.mkdirs();
            }
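            // debug executions use a DEBUG_RUN context and are tracked separately in debugRunnings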
            final Job job = JobUtils.createDebugJob(
                new JobContext(JobContext.DEBUG_RUN), history, direcotry
                    .getAbsolutePath(), context
                    .getApplicationContext());
            context.getDebugRunnings().put(debugId, job);

            Integer exitCode = -1;
View Full Code Here

                + history.getId());
            if (!direcotry.exists()) {
              direcotry.mkdirs();
            }

            final Job job = JobUtils.createJob(new JobContext(JobContext.SCHEDULE_RUN),
                jb, history, direcotry.getAbsolutePath(),
                context.getApplicationContext());
            context.getRunnings().put(jobId, job);

            Integer exitCode = -1;
View Full Code Here

        } finally{
          IOUtils.closeQuietly(out);
        }
       
        JobHistory history=new JobHistory();
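        // a context assembled by hand: work dir, history and (empty) properties are set explicitly before the upload runs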
        JobContext jobContext=new JobContext();
        jobContext.setWorkDir(temp.getParent());
        jobContext.setJobHistory(history);
        jobContext.setProperties(new HierarchyProperties(new HashMap<String, String>()));
        UploadHdfsFileJob job=new UploadHdfsFileJob(jobContext, temp.getAbsolutePath(), hdfsLibPath);
        Integer exitCode=job.run();
        if(exitCode!=0){
          log.error(history.getLog().getContent());
          resp.getWriter().write(history.getLog().getContent());
View Full Code Here

