Package: com.taobao.top.analysis.node.job

Examples of com.taobao.top.analysis.node.job.JobTaskResult


      jobTaskResultsQueue.offer(defaultAnalysisEngine.doAnalysis(task));
    }
   
    jobResultMerge.merge(job, branchResultQueue, jobTaskResultsQueue, false);
   
    JobTaskResult mergedJobTask = jobResultMerge.merge(job.getJobTasks().get(0), mergeing, false,true);
   
    //多线程,需要休息一会儿
    Thread.sleep(2000);
   
    Map<String, Map<String, Object>> mergedResult = job.getJobResult();
   
    String key = mergedResult.keySet().iterator().next();
    String key2 = mergedResult.get(key).keySet().iterator().next();
    Object value = mergedResult.get(key).get(key2);
   
    Assert.assertEquals(mergedJobTask.getResults().get(key).get(key2), value);
     
    defaultAnalysisEngine.releaseResource();
    mixJobBuilder.releaseResource();   
    jobResultMerge.releaseResource();
  }
View Full Code Here


      for(JobTask jobtask : tasks)
      {
        taskResults.add(defaultAnalysisEngine.doAnalysis(jobtask));
      }
     
      JobTaskResult jobTaskResult = jobResultMerger.merge(tasks.get(0), taskResults,true,true);
     
      defaultAnalysisEngine.doExport(tasks.get(0), jobTaskResult);
    }
    System.out.println(System.currentTimeMillis() - time);
    fileJobExporter.releaseResource();
View Full Code Here

                        config.getAlertFrom(),
                        config.getAlertModel(),
                        config.getAlertWangWang(),
                        "Can't connect resource:" + jobTask.getInput());
          }
          JobTaskResult jobTaskResult = new JobTaskResult();
            jobTaskResult.setJobName(jobTask.getJobName());
            jobTaskResult.addTaskId(jobTask.getTaskId());
            jobTaskResult.setJobEpoch(jobTask.getJobEpoch());
            taskExecuteInfo.setAnalysisConsume(0);
              taskExecuteInfo.setEmptyLine(0);
              taskExecuteInfo.setErrorLine(0);
              taskExecuteInfo.setJobDataSize(0);
              taskExecuteInfo.setTotalLine(0);
              taskExecuteInfo.setTaskId(jobTask.getTaskId());
              taskExecuteInfo.setSuccess(false);
             
              jobTaskResult.addTaskExecuteInfo(taskExecuteInfo);
        logger.error("Input not found! input : " + jobTask.getInput());
        return jobTaskResult;
      }
     
      return analysis(in,jobTask, taskExecuteInfo);
View Full Code Here

  {
   
    String encoding = jobtask.getInputEncoding();
    String splitRegex = jobtask.getSplitRegex();
   
    JobTaskResult jobTaskResult = new JobTaskResult();
    jobTaskResult.setJobName(jobtask.getJobName());
    jobTaskResult.addTaskId(jobtask.getTaskId());
    jobTaskResult.setJobEpoch(jobtask.getJobEpoch());
   
    Map<String, ReportEntry> entryPool = jobtask.getStatisticsRule().getEntryPool();
   
    BufferedReader reader = new BufferedReader(new InputStreamReader(in, encoding));
   
    int normalLine = 0;//正常的行数
    int emptyLine=0;//拉取空行的次数
    int exceptionLine=0;//一行中,只要entry有异常,该行就是存在异常的行。
    int size = 0;
    String record;
   
    ReportEntry entry = null;
   
    long beg = System.currentTimeMillis();
   
    try
    {
      //逐行处理
      while ((record = reader.readLine()) != null)
      {
        boolean failure=false;
       
        try
        {
          if (record == null || "".equals(record))
          {
            emptyLine++;
            continue;
          }
         
          size += record.getBytes().length;
         
          String[] contents = StringUtils.splitByWholeSeparatorPreserveAllTokens(record, splitRegex);
          Iterator<String> keys = entryPool.keySet().iterator();
          while (keys.hasNext())
          {
            try
            {
              String key = keys.next();
              entry = entryPool.get(key);
              if(!entry.isLazy()){
                  processSingleLine(entry, contents,jobtask,jobTaskResult, taskExecuteInfo);
//                if(!processSingleLine(entry, contents,jobtask,jobTaskResult, taskExecuteInfo)) {
//                    if(entry.getReports().contains("appAuthReport"))
//                        logger.error("key null, record:" + record);
//                }
              }
             
            }
            catch (Throwable e)
            {
                            if (!failure)
                                exceptionLine++;

                            failure = true;

                             if (!threshold.sholdBlock())
                            logger.error(
                                new StringBuilder().append("Entry :").append(entry.getId()).append(", job : ")
                                    .append(jobtask.getJobName()).append(", entry:").append(entry.getName())
                                    .append("\r\n record: ").append(record).toString(), e);
                        }
          }

          if(!failure)
            normalLine++;
         
        }
        catch(Throwable t)
        {
          if(!failure)
            exceptionLine++;
         
          if (!threshold.sholdBlock())
            logger.error(
              new StringBuilder()
                  .append("\r\n record: ").append(record)
                  .toString(), t);
        }
      }
     
    }
    catch (Throwable ex) {
      taskExecuteInfo.setSuccess(false);
      logger.error(ex,ex);
    }
    finally
    {
      if (reader != null)
      {
        try {
          reader.close();
          reader = null;
        }
        catch (Throwable ex) {
          logger.error(ex,ex);
        }
      }
     
      taskExecuteInfo.setAnalysisConsume(System.currentTimeMillis() - beg);
      taskExecuteInfo.setEmptyLine(emptyLine);
      taskExecuteInfo.setErrorLine(exceptionLine);
      taskExecuteInfo.setJobDataSize(size);
      taskExecuteInfo.setTotalLine(normalLine+exceptionLine+emptyLine);
      taskExecuteInfo.setTaskId(jobtask.getTaskId());
      taskExecuteInfo.setSuccess(true);
     
      jobTaskResult.addTaskExecuteInfo(taskExecuteInfo);
     
      if (logger.isWarnEnabled())
        logger.warn(new StringBuilder("jobtask ").append(jobtask.getTaskId())
          .append(",analysis consume time: ").append(taskExecuteInfo.getAnalysisConsume())
          .append(",normal line count: ").append(normalLine)
View Full Code Here

        for(JobTask jobtask : tasks)
        {
          taskResults.add(defaultAnalysisEngine.doAnalysis(jobtask));
        }
       
        JobTaskResult jobTaskResult = jobResultMerger.merge(tasks.get(0), taskResults,true,true);
       
        defaultAnalysisEngine.doExport(tasks.get(0), jobTaskResult);
      }
    }
View Full Code Here

   
    for(Job job : jobs.values())
    {
      JobTask task = job.getJobTasks().get(0);
     
      JobTaskResult jobTaskResult = defaultAnalysisEngine.doAnalysis(task)
     
      fileJobExporter.exportReport(task, jobTaskResult, false);
    }
     
    defaultAnalysisEngine.releaseResource();
View Full Code Here

   
    SendResultsRequestEvent jobResponseEvent = new SendResultsRequestEvent("1234");
 
    JobTask task = jobManager.getJobs().values().iterator().next().getJobTasks().get(0);
   
    JobTaskResult jobTaskResult = defaultAnalysisEngine.doAnalysis(task)
 
    jobResponseEvent.setJobTaskResult(jobTaskResult);
   
    jobManager.addTaskResultToQueue(jobResponseEvent);
   
   
    JobTaskResult jobTaskResult2 = jobManager.getJobTaskResultsQueuePool().get(task.getJobName()).poll();
   
    Assert.assertEquals(jobTaskResult, jobTaskResult2);
   
    SendResultsResponseEvent sendResultsResponseEvent = (SendResultsResponseEvent)tunnel.getSlaveSide().poll();
   
View Full Code Here

    // 分配任务和结果提交处理由于是单线程处理,
    // 因此本身不用做状态池并发控制,将消耗较多的发送操作交给ServerConnector多线程操作
    @Override
    public void addTaskResultToQueue(SendResultsRequestEvent jobResponseEvent) {

        JobTaskResult jobTaskResult = jobResponseEvent.getJobTaskResult();

        if (jobTaskResult.getTaskIds() != null && jobTaskResult.getTaskIds().size() > 0) {
            // 判断是否是过期的一些老任务数据,根据task和taskresult的createtime来判断
            // 以后要扩展成为如果发现当前的epoch < 结果的epoch,表明这台可能是从属的master,负责reduce,但是速度跟不上了
            if(jobTaskPool.get(jobTaskResult.getTaskIds().get(0)) == null) {
                logger.error("jobTask is null " + jobTaskResult.getTaskIds().get(0));
                masterNode.echoSendJobTaskResults(jobResponseEvent.getSequence(), "success", jobResponseEvent.getChannel());
                return;
            }
            if (jobTaskResult.getJobEpoch() != jobTaskPool.get(jobTaskResult.getTaskIds().get(0)).getJobEpoch() && this.config.getDispatchMaster()) {
             
              // 结果过期, 肯能是任务超时后, 被重新分配了
                if (jobTaskResult.getJobEpoch() < jobTaskPool.get(jobTaskResult.getTaskIds().get(0)).getJobEpoch()) {
                    logger.error("old task result will be discard! job:" + jobTaskPool.get(jobTaskResult.getTaskIds().get(0)).getJobName() + ",epoch:" + jobTaskResult.getJobEpoch() + ",slave:" + jobResponseEvent.getChannel());
                    masterNode.echoSendJobTaskResults(jobResponseEvent.getSequence(), "success", jobResponseEvent.getChannel());
                    return;
                }
                else {
                    // 给一定的容忍时间,暂时定为5秒
                    jobs.get(jobTaskPool.get(jobTaskResult.getTaskIds().get(0)).getJobName()).blockToResetJob(15000);
                   
                    // 这块有点疑问, 什么情况会出现
                    if (jobTaskResult.getJobEpoch() > jobTaskPool.get(jobTaskResult.getTaskIds().get(0)).getJobEpoch()) {
                        logger.error("otherMaster can't merge in time!job:" + jobTaskPool.get(jobTaskResult.getTaskIds().get(0)).getJobName() + ",taskResult epoch:" + jobTaskResult.getJobEpoch() + ", task epoch:" + jobTaskPool.get(jobTaskResult.getTaskIds().get(0)).getJobEpoch());
                        masterNode.echoSendJobTaskResults(jobResponseEvent.getSequence(), "success", jobResponseEvent.getChannel());
                        if(!this.config.getDispatchMaster()) {
                            jobs.get(jobTaskResult.getJobName()).reset(this);
                        } else {
                            return;
                        }
                    }
                }
            }

            if (logger.isWarnEnabled()) {
                StringBuilder ts =
                        new StringBuilder("Receive slave analysis result, jobTaskIds : ")
                            .append(jobTaskResult.toString()).append(", ").append(jobTaskResult.getTaskIds().size());
                logger.warn(ts.toString());
            }
            if(jobs.get(jobTaskPool.get(jobTaskResult.getTaskIds().get(0)).getJobName()).isMerged().get()) {
                masterNode.echoSendJobTaskResults(jobResponseEvent.getSequence(), "success", jobResponseEvent.getChannel());
                return;
            }

            // 先放入队列,防止小概率多线程并发问题
            jobTaskResultsQueuePool.get(jobTaskPool.get(jobTaskResult.getTaskIds().get(0)).getJobName()).offer(
                jobTaskResult);
            if(logger.isInfoEnabled()) {
                StringBuilder sb = new StringBuilder("add result [");
                for(String s : jobTaskResult.getTaskIds()) {
                    sb.append(s).append(",");
                }
                sb.append("] to queue:").append(jobTaskPool.get(jobTaskResult.getTaskIds().get(0)).getJobName());
                logger.info(sb.toString());
            }

            Iterator<String> iter = jobTaskResult.getTaskIds().iterator();
            while (iter.hasNext()) {
                String taskId = iter.next();
                JobTask jobTask = jobTaskPool.get(taskId);
               
                if (jobTask == null)
                {    
                  logger.error(new StringBuilder("taskId :").append(taskId).append("not exist!").toString());
                  continue;
                }
               
                Job job = jobs.get(jobTask.getJobName());
                if(job == null) {
                    logger.error(new StringBuilder("job :").append(jobTask.getJobName()).append("not exist!").toString());
                    continue;
                }

                if (statusPool.replace(taskId, JobTaskStatus.DOING, JobTaskStatus.DONE)
                        || statusPool.replace(taskId, JobTaskStatus.UNDO, JobTaskStatus.DONE)) {
                    logger.info("task " + jobTask.getJobName() + " of job " + job.getJobName() + " done");
                    jobTask.setStatus(JobTaskStatus.DONE);
                    jobTask.getTailCursor().compareAndSet(true, false);
                    jobTask.setEndTime(System.currentTimeMillis());
                    jobTask.setLastMergedEpoch(job.getEpoch().get());
                    job.getCompletedTaskCount().incrementAndGet();
                } else {
                    if(!this.config.getDispatchMaster()) {
                        jobTask.setStatus(JobTaskStatus.DONE);
                        jobTask.getTailCursor().compareAndSet(true, false);
                        jobTask.setEndTime(System.currentTimeMillis());
                        jobTask.setLastMergedEpoch(job.getEpoch().get());
                        statusPool.put(taskId, JobTaskStatus.DONE);
                        iter.remove();
                    }
                }
               
                //对jobTask的执行结果打点
                StringBuilder log = new StringBuilder(ReportUtil.SLAVE_LOG).append(",timeStamp=")
                          .append(System.currentTimeMillis()).append(",epoch=")
                          .append(job.getEpoch()).append(",jobName=");
                log.append(jobTask.getJobName()).append(",taskId=")
                  .append(jobTask.getTaskId()).append(",recycleCounter=")
                  .append(jobTask.getRecycleCounter().get()).append(",slaveIp=")
                  .append(jobTaskResult.getSlaveIp()).append(",efficiency=")
                  .append(jobTaskResult.getEfficiency()).append(",");
              
                JobTaskExecuteInfo executeInfo = jobTaskResult.getTaskExecuteInfos().get(jobTask.getTaskId());
               
                if (executeInfo != null) {
                    log.append("analysisConsume=").append(executeInfo.getAnalysisConsume()).append(",")
                        .append("jobDataSize=").append(executeInfo.getJobDataSize()).append(",").append("totalLine=")
                        .append(executeInfo.getTotalLine()).append(",").append("errorLine=")
View Full Code Here

      if (jobTasks.length == 1)
      {
        try
        {
          //计算并输出
          JobTaskResult jobTaskResult = statisticsEngine.doAnalysis(jobTasks[0]);
         
          long taskConsumeTime = System.currentTimeMillis() - timer;
          jobTaskResult.setEfficiency((taskConsumeTime + this.hardWorkTimer.get())
                  /(System.currentTimeMillis()  - this.nodeStartTimeStamp));
         
          if (jobTaskResult != null) {
            handleTaskResult(jobTasks[0],jobTaskResult);
          }
View Full Code Here

      //批量发送消息
      final CountDownLatch taskCountDownLatch = new CountDownLatch(_masterEntryResults.size());
         
            for (Entry<String, Map<String, Map<String, Object>>> e : _masterEntryResults.entrySet()) {
                final Entry<String, Map<String, Map<String, Object>>> entrySet = e;
                final JobTaskResult tResult = jobTaskResult.cloneWithOutResults();
                tResult.setResults(entrySet.getValue());
                tResult.setJobName(jobTask.getJobName());
                tResult.setResultKey(entrySet.getKey());

                if (!this.config.getMultiSendResult()) {
                    try {
                        // 这里应该是这样的吧:
                        String masterAddr = entrySet.getKey();
                        // String masterAddr =
                        // entrySet.getKey().substring(entrySet.getKey().indexOf(":")+1);
                        SendResultsRequestEvent event = generateSendResultsRequestEvent(tResult);
                        String result = slaveConnector.sendJobTaskResults(event, entrySet.getKey());
                        if (result == null) {
                            Thread.sleep(100);
                            logger.warn("try to send result to master : " + masterAddr + " again.");
                            result = slaveConnector.sendJobTaskResults(event, masterAddr);

                            // 开始写入本地文件
                            if (result == null) {
                                logger.error(new StringBuilder("send result to master : ").append(entrySet.getKey())
                                    .append(" fail again! now to write file to local,jobName : ")
                                    .append(jobTask.getJobName()).toString());

                                String destFile = getTempStoreDataFile(entrySet.getKey(), jobTask.getJobName());

                                if (destFile != null) {
                                    JobDataOperation.export(event.getJobTaskResult().getResults(), destFile, false,
                                        false, (new HashMap<String, Long>()));
                                }
                            }
                        }

                        logger.info("send piece result to master :" + entrySet.getKey() + ", size:"
                                + entrySet.getValue().size() + ", result:" + result);
                    }
                    catch (Throwable e1) {
                        logger.error(e1, e1);
                    }
                }
                else {

                    analysisWorkerThreadPool.execute(new Runnable() {
                        public void run() {
                            try {
                                // 这里应该是这样的吧:
                                String masterAddr = entrySet.getKey();
                                // String masterAddr =
                                // entrySet.getKey().substring(entrySet.getKey().indexOf(":")+1);
                                SendResultsRequestEvent event = generateSendResultsRequestEvent(tResult);
                                String result = slaveConnector.sendJobTaskResults(event, entrySet.getKey());

                                // 做一次重试
                                if (result == null) {
                                    Thread.sleep(100);
                                    logger.warn("try to send result to master : " + masterAddr + " again.");
                                    result = slaveConnector.sendJobTaskResults(event, masterAddr);

                                    // 开始写入本地文件
                                    if (result == null) {
                                        logger.error(new StringBuilder("send result to master : ")
                                            .append(entrySet.getKey())
                                            .append(" fail again! now to write file to local,jobName : ")
                                            .append(jobTask.getJobName()).toString());

                                        String destFile = getTempStoreDataFile(entrySet.getKey(), jobTask.getJobName());

                                        if (destFile != null) {
                                            JobDataOperation.export(event.getJobTaskResult().getResults(), destFile,
                                                false, false, (new HashMap<String, Long>()));
                                        }
                                    }
                                }

                                logger.info("send piece result to master :" + entrySet.getKey() + ", size:"
                                        + entrySet.getValue().size() + ", result:" + result);
                            }
                            catch (Exception e) {
                                logger.error(e, e);
                            }
                            finally {
                                taskCountDownLatch.countDown();
                            }
                        }
                    });
                }
            }
            if (config.getMultiSendResult()) {
                try {
                    if (!taskCountDownLatch.await(this.config.getMaxSendResultTime(), TimeUnit.SECONDS))
                        logger.error("send piece result to master timeout !");
                }
                catch (InterruptedException e2) {
                    // do nothing
                }
            }
     
      //暂时采用策略,将所有结果向所有其他master投递一个空的结果集,以确保所有master都有收到
            //任务结束的通知,这里代码很丑陋啊~~
      final CountDownLatch taskCountDown = new CountDownLatch((masters.size() - _masterEntryResults.size()));
      for(final String master : masters) {
                if(_masterEntryResults.get(master) != null)
                    continue;
                final JobTaskResult tResult = jobTaskResult.cloneWithOutResults();
                tResult.setJobName(jobTask.getJobName());
                analysisWorkerThreadPool.execute(
                    new Runnable()
                    {
                        public void run()
                        {
View Full Code Here

TOP

Related Classes of com.taobao.top.analysis.node.job.JobTaskResult

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.