Package: com.taobao.top.analysis.node.job

Examples of com.taobao.top.analysis.node.job.Job


    mixJobBuilder.setConfig(config);
    mixJobBuilder.init();
   
   
    Map<String, Job> jobs = mixJobBuilder.build();
    Job job = jobs.values().iterator().next();
    List<JobTaskResult> mergeing = new ArrayList<JobTaskResult>();
   
    for(JobTask task : job.getJobTasks())
    {
      mergeing.add(defaultAnalysisEngine.doAnalysis(task));
      jobTaskResultsQueue.offer(defaultAnalysisEngine.doAnalysis(task));
    }
   
    jobResultMerge.merge(job, branchResultQueue, jobTaskResultsQueue, false);
   
    JobTaskResult mergedJobTask = jobResultMerge.merge(job.getJobTasks().get(0), mergeing, false,true);
   
    //多线程,需要休息一会儿
    Thread.sleep(2000);
   
    Map<String, Map<String, Object>> mergedResult = job.getJobResult();
   
    String key = mergedResult.keySet().iterator().next();
    String key2 = mergedResult.get(key).keySet().iterator().next();
    Object value = mergedResult.get(key).get(key2);
   
View Full Code Here


    mixJobBuilder.setConfig(config);
    mixJobBuilder.init();
    fileJobExporter.init();
   
    Map<String, Job> jobs = mixJobBuilder.build();
    Job job = jobs.values().iterator().next();
   
    JobTask task = job.getJobTasks().get(0);
   
    job.setJobResult(defaultAnalysisEngine.doAnalysis(task).getResults());
   
    fileJobExporter.exportEntryData(job);
   
    Thread.sleep(1000);
   
    Map<String, Map<String, Object>> result = job.getJobResult();
    job.setJobResult(null);
   
    fileJobExporter.loadEntryData(job);
    fileJobExporter.loadEntryDataToTmp(job);
   
    Thread.sleep(1000);
   
    String key = result.keySet().iterator().next();
    String key2 = result.get(key).keySet().iterator().next();
    Object value = result.get(key).get(key2);
   
    Assert.assertEquals(job.getJobResult().get(key).get(key2), value);
    Assert.assertEquals(job.getDiskResult().get(key).get(key2), value);
   
     
    defaultAnalysisEngine.releaseResource();
    mixJobBuilder.releaseResource();
    fileJobExporter.releaseResource();
View Full Code Here

  }
 
  public static void main(String[] args) throws AnalysisException
  {
    Map<String, Map<String, Object>> resultPool = new HashMap<String, Map<String, Object>>();
    Job job = new Job();
   
    Map<String,Object> innPool = new HashMap<String,Object>();
    innPool.put("key1", "value1");
    innPool.put("key2", "value2");
    innPool.put("key3", "value3");
    innPool.put("key4", "value4");
    innPool.put("ceshi", new Double(1.0));
   
    resultPool.put("entry1", innPool);
   
    JobDataOperation.export(resultPool, "resultPool.tmp",false,true, job.getCursorMap());
    JobDataOperation.export(resultPool, "resultPool.tmp",false,false, job.getCursorMap());
   
    List<Map<String, Map<String, Object>>> resultPools = JobDataOperation.load(new File("resultPool.tmp"),false,job,false);
   
    Assert.assertEquals(2,resultPools.size());
    Assert.assertEquals("value4", resultPools.get(0).get("entry1").get("key4"));
    Assert.assertEquals("value4", resultPools.get(1).get("entry1").get("key4"));
    Assert.assertEquals(new Double(1.0), resultPools.get(0).get("entry1").get("ceshi"));
    Assert.assertEquals(new Double(1.0), resultPools.get(1).get("entry1").get("ceshi"));
   
   
    JobDataOperation.export(resultPool, "resultPool.tmp",true,true, job.getCursorMap());
    resultPools = JobDataOperation.load(new File("resultPool.tmp"),true,job,false);
   
    Assert.assertEquals(1,resultPools.size());
    Assert.assertEquals("value4", resultPools.get(0).get("entry1").get("key4"));
   
View Full Code Here

    //1、针对job的属性设置监听器,Listener模式
    //2、使用Observer模式
    protected void mergeAndExportJobs() {
        Iterator<Map.Entry<String, Job>> iter = jobs.entrySet().iterator();
        while(iter.hasNext()) {
          Job job = iter.next().getValue();
          if(job.getRebuildTag() == 2) {
              job.rebuild(0, null, this);
              continue;
          }
          if (!job.getJobTimeOut().get())
          {
            // 需要合并该job的task
            if (!job.isMerging().get() && job.needMerge()) {
                logger.warn("job " + job.getJobName()
                     + " complete tasks:" + job.getCompletedTaskCount().get() + ", merged tasks :" + job.getMergedTaskCount().get());
                final Job j = job;
                final BlockingQueue<JobMergedResult> branchResultQueue = branchResultQueuePool.get(j.getJobName());
                final BlockingQueue<JobTaskResult> jobTaskResultsQueue = jobTaskResultsQueuePool.get(j.getJobName());
   
                if (j.isMerging().compareAndSet(false, true))
                    eventProcessThreadPool.execute(new Runnable() {
                        public void run() {
                            try {
                                jobResultMerger.merge(j, branchResultQueue, jobTaskResultsQueue, true);
                            } catch (Throwable e) {
                                logger.error(e);
                            }
                            finally {
                                j.isMerging().set(false);
                            }
                        }
                    });
            }
          }
          else
          {
            // Job超时了, 尝试做一次主干merge
            //判断是否还有和主干合并的线程,如果没有可以设置完成标识
            boolean gotIt = job.getTrunkLock().writeLock().tryLock();
           
            if (gotIt)
            {
              try
              if(!job.isMerged().get())
                {
                  List<Map<String, Map<String, Object>>> mergeResults = new ArrayList<Map<String, Map<String, Object>>>();
                  new MergeJobOperation(job,0,mergeResults,config,branchResultQueuePool.get(job.getJobName())).run();
               
                  job.isMerged().set(true);
                  logger.warn("job is timeout, last merge trunk success!");
                }
              }
              finally
              {
                job.getTrunkLock().writeLock().unlock();
              }
             
            }
           
          }

            // 需要导出该job的数据
            if (!job.isExporting().get() && job.needExport()) {
                final Job j = job;

                if (j.isExporting().compareAndSet(false, true))
                    eventProcessThreadPool.execute(new Runnable() {
                        public void run() {
                            try {
                                // 虽然是多线程,但还是阻塞模式来做
                                jobExporter.exportReport(j, false);
                                j.isExported().set(true);
                            } catch (Throwable e) {
                                logger.error(e);
                            }
                            finally {
                                j.isExporting().set(false);
                            }

                            // 判断是否需要开始导出中间结果,放在外部不妨碍下一次的处理
                            exportOrCleanTrunk(j);
                        }
View Full Code Here

            masterNode.echoGetJobTasks(requestEvent.getSequence(), jobTasks, requestEvent.getChannel());
            return;
        }
        // 指定job
        if (jobName != null && jobs.containsKey(jobName)) {
            Job job = jobs.get(jobName);

            List<JobTask> tasks = job.getJobTasks();

            for (JobTask jobTask : tasks) {
                if (jobTask.getStatus().equals(JobTaskStatus.UNDO)) {
                    if (statusPool.replace(jobTask.getTaskId(), JobTaskStatus.UNDO, JobTaskStatus.DOING)) {
                        this.allocateTask(jobTask);
View Full Code Here

                {    
                  logger.error(new StringBuilder("taskId :").append(taskId).append("not exist!").toString());
                  continue;
                }
               
                Job job = jobs.get(jobTask.getJobName());
                if(job == null) {
                    logger.error(new StringBuilder("job :").append(jobTask.getJobName()).append("not exist!").toString());
                    continue;
                }

                if (statusPool.replace(taskId, JobTaskStatus.DOING, JobTaskStatus.DONE)
                        || statusPool.replace(taskId, JobTaskStatus.UNDO, JobTaskStatus.DONE)) {
                    logger.info("task " + jobTask.getJobName() + " of job " + job.getJobName() + " done");
                    jobTask.setStatus(JobTaskStatus.DONE);
                    jobTask.getTailCursor().compareAndSet(true, false);
                    jobTask.setEndTime(System.currentTimeMillis());
                    jobTask.setLastMergedEpoch(job.getEpoch().get());
                    job.getCompletedTaskCount().incrementAndGet();
                } else {
                    if(!this.config.getDispatchMaster()) {
                        jobTask.setStatus(JobTaskStatus.DONE);
                        jobTask.getTailCursor().compareAndSet(true, false);
                        jobTask.setEndTime(System.currentTimeMillis());
                        jobTask.setLastMergedEpoch(job.getEpoch().get());
                        statusPool.put(taskId, JobTaskStatus.DONE);
                        iter.remove();
                    }
                }
               
                //对jobTask的执行结果打点
                StringBuilder log = new StringBuilder(ReportUtil.SLAVE_LOG).append(",timeStamp=")
                          .append(System.currentTimeMillis()).append(",epoch=")
                          .append(job.getEpoch()).append(",jobName=");
                log.append(jobTask.getJobName()).append(",taskId=")
                  .append(jobTask.getTaskId()).append(",recycleCounter=")
                  .append(jobTask.getRecycleCounter().get()).append(",slaveIp=")
                  .append(jobTaskResult.getSlaveIp()).append(",efficiency=")
                  .append(jobTaskResult.getEfficiency()).append(",");
              
                JobTaskExecuteInfo executeInfo = jobTaskResult.getTaskExecuteInfos().get(jobTask.getTaskId());
               
                if (executeInfo != null) {
                    log.append("analysisConsume=").append(executeInfo.getAnalysisConsume()).append(",")
                        .append("jobDataSize=").append(executeInfo.getJobDataSize()).append(",").append("totalLine=")
                        .append(executeInfo.getTotalLine()).append(",").append("errorLine=")
                        .append(executeInfo.getErrorLine()).append(",").append("emptyLine=")
                        .append(executeInfo.getEmptyLine()).append(",fileBegin=").append(executeInfo.getFileBegin())
                        .append(",fileLength=").append(executeInfo.getFileLength());
                    if(jobTask.getInput().startsWith("hub:")) {
                        jobTask.setJobSourceTimeStamp(executeInfo.getTimestamp());
                        job.updateCursor(jobTask.getUrl(), executeInfo.getFileBegin(), executeInfo.getFileLength(), executeInfo.getTimestamp());
                    }
                }
                else
                  logger.error(new StringBuilder().append("taskId : ").
                      append(jobTask.getTaskId()).append(" executeInfo is null!").toString());
               
                ReportUtil.clusterLog(log.toString());
               
               
                //增加一块对于zookeeper的支持
            if (StringUtils.isNotEmpty(config.getZkServer()) && zk != null)
            {
              try
              {            
                ZKUtil.updateOrCreateNode(zk,new StringBuilder()
                      .append(ZKUtil.getGroupMasterZKPath(config.getGroupId()))
                      .append("/").append(config.getMasterName())
                      .append("/runtime/").append(job.getEpoch())
                      .append("/").append(jobTask.getJobName())
                      .append("/").append(jobTask.getTaskId()).toString(),log.toString().getBytes("UTF-8"));
               
              }
              catch(Exception ex)
View Full Code Here


    @Override
    public void clearJobData(String jobName) {

        Job job = jobs.get(jobName);

        if (job != null) {
            job.getJobResult().clear();

            if (logger.isWarnEnabled())
                logger.warn("clear job :" + job.getJobName() + " data.");
        }
    }
View Full Code Here

        Set<String> allMasters = new HashSet<String>();
       
        for(String j : instances)
        {
                    try {
                        Job job = new Job();
                        Rule rule = new Rule();
                        JobConfig jobconfig = new JobConfig();
                        job.setStatisticsRule(rule);
                        job.setJobConfig(jobconfig);
                        job.setJobName(j);

                        getConfigFromProps(j, jobconfig, prop);

                        if (jobconfig.getReportConfigs() == null
                                || (jobconfig.getReportConfigs() != null && jobconfig.getReportConfigs().length == 0)) {
                            throw new AnalysisException("job Config files should not be null!");
                        }

                        buildRule(jobconfig.getReportConfigs(), rule);
                       
                        //增加一个获得当前临时文件数据源游标的操作
//                        JobDataOperation jobDataOperation = new JobDataOperation(job,
//                            AnalysisConstants.JOBMANAGER_EVENT_LOADDATA,this.config);
//                        jobDataOperation.run();
                        JobDataOperation.getSourceTimeStamp(job, this.config);
//                        JobDataOperation.loadDataToTmp(job, this.config);
//                        JobDataOperation.loadData(job, this.config);
                       
                        buildTasks(job);
                        jobs.put(job.getJobName(), job);
                        this.jobConfigs.put(job.getJobName(),
                            new JobResource(job.getJobName(), jobconfig.getReportConfigs()));
                        if (job.getJobConfig().getSaveTmpResultToFile() == null && this.config != null)
                            job.getJobConfig().setSaveTmpResultToFile(
                                String.valueOf(this.config.getSaveTmpResultToFile()));
                        if (job.getJobConfig().getAsynLoadDiskFilePrecent() < 0 && this.config != null)
                            job.getJobConfig().setAsynLoadDiskFilePrecent(
                                String.valueOf(this.config.getAsynLoadDiskFilePrecent()));
                    }
                    catch (Throwable e) {
                        logger.error("build job error : " + j, e);
                    }
View Full Code Here

            continue;
         
          String jobName = fileName.substring(fileName.indexOf(AnalysisConstants.SPLIT_KEY) + AnalysisConstants.SPLIT_KEY.length(),
              fileName.indexOf(AnalysisConstants.TEMP_MASTER_DATAFILE_SUFFIX));
         
          Job job = jobs.get(jobName);
         
          List<Map<String, Map<String, Object>>> mergeResults = JobDataOperation.load(f, false,job,false);
         
          //如果合并成功,删除临时文件
          if (MergeJobOperation.mergeToTrunk(job, mergeResults, config))
View Full Code Here

TOP

Related Classes of com.taobao.top.analysis.node.job.Job

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by Oracle Inc. Contact coftware#gmail.com.