Package: com.taobao.top.analysis.statistics

Examples of com.taobao.top.analysis.statistics.StatisticsEngine


    SocketSlaveConnector slaveConnector = new SocketSlaveConnector();
    SlaveMonitor monitor = new SlaveMonitor();
    slaveConnector.setDownstreamHandler(new ObjectEncoder());
    slaveConnector.setUpstreamHandler(new ObjectDecoder());
     
    StatisticsEngine statisticsEngine = new StatisticsEngine();
    SlaveConfig slaveConfig = new SlaveConfig();
    slaveConfig.load(configfile);
    slaveNode.setConfig(slaveConfig);
    slaveNode.setSlaveConnector(slaveConnector);
    slaveNode.setStatisticsEngine(statisticsEngine);
    slaveNode.setJobResultMerger(jobResultMerger2);
    slaveNode.setMonitor(monitor);
   
    IInputAdaptor fileInputAdaptor =  new FileInputAdaptor();
    IInputAdaptor httpInputAdaptor = new HttpInputAdaptor();
    IInputAdaptor hdfsInputAdaptor = new HdfsInputAdaptor();
    IOutputAdaptor fileOutAdaptor = new FileOutputAdaptor();
   
    FileJobExporter fileJobExporter2 = new FileJobExporter();
    fileJobExporter2.setMaxCreateReportWorker(2);
    fileJobExporter2.init();
   
    ((FileOutputAdaptor)fileOutAdaptor).setJobExporter(fileJobExporter2);
   
    statisticsEngine.addInputAdaptor(fileInputAdaptor);
    statisticsEngine.addInputAdaptor(httpInputAdaptor);
    statisticsEngine.addInputAdaptor(hdfsInputAdaptor);
    statisticsEngine.addOutputAdaptor(fileOutAdaptor);
   
    if (needStart)
      slaveNode.startNode();
   
    return slaveNode;
View Full Code Here


    //build SlaveNode
    SlaveNode slaveNode = new SlaveNode();
    JobResultMerger jobResultMerger2 = new JobResultMerger();
    MemSlaveConnector slaveConnector = new MemSlaveConnector();
    slaveConnector.setTunnel(tunnel);
    StatisticsEngine statisticsEngine = new StatisticsEngine();
    SlaveConfig slaveConfig = new SlaveConfig();
    slaveConfig.load("slave-config.properties");
    slaveNode.setConfig(slaveConfig);
    slaveNode.setSlaveConnector(slaveConnector);
    slaveNode.setStatisticsEngine(statisticsEngine);
    slaveNode.setJobResultMerger(jobResultMerger2);
   
    IInputAdaptor fileInputAdaptor =  new FileInputAdaptor();
    IInputAdaptor httpInputAdaptor = new HttpInputAdaptor();
    IOutputAdaptor fileOutAdaptor = new FileOutputAdaptor();

   
    FileJobExporter fileJobExporter2 = new FileJobExporter();
    fileJobExporter2.setMaxCreateReportWorker(2);
    fileJobExporter2.init();
   
    ((FileOutputAdaptor)fileOutAdaptor).setJobExporter(fileJobExporter2);
   
    statisticsEngine.addInputAdaptor(fileInputAdaptor);
    statisticsEngine.addInputAdaptor(httpInputAdaptor);
    statisticsEngine.addOutputAdaptor(fileOutAdaptor);
    slaveNode.startNode();
   
   
    Thread.sleep(30 * 1000);
   
 
View Full Code Here

   
    BlockingQueue<JobMergedResult> branchResultQueue = new LinkedBlockingQueue<JobMergedResult>();
    BlockingQueue<JobTaskResult> jobTaskResultsQueue = new LinkedBlockingQueue<JobTaskResult>();
   
   
    StatisticsEngine defaultAnalysisEngine = new StatisticsEngine();
    defaultAnalysisEngine.init();
   
    IInputAdaptor fileInputAdaptor =  new FileInputAdaptor();
    IInputAdaptor httpInputAdaptor = new HttpInputAdaptor();
   
    defaultAnalysisEngine.addInputAdaptor(fileInputAdaptor);
    defaultAnalysisEngine.addInputAdaptor(httpInputAdaptor);
   
    MixJobBuilder mixJobBuilder = new MixJobBuilder();
    mixJobBuilder.setConfig(config);
    mixJobBuilder.init();
   
   
    Map<String, Job> jobs = mixJobBuilder.build();
    Job job = jobs.values().iterator().next();
    List<JobTaskResult> mergeing = new ArrayList<JobTaskResult>();
   
    for(JobTask task : job.getJobTasks())
    {
      mergeing.add(defaultAnalysisEngine.doAnalysis(task));
      jobTaskResultsQueue.offer(defaultAnalysisEngine.doAnalysis(task));
    }
   
    jobResultMerge.merge(job, branchResultQueue, jobTaskResultsQueue, false);
   
    JobTaskResult mergedJobTask = jobResultMerge.merge(job.getJobTasks().get(0), mergeing, false,true);
   
    //多线程,需要休息一会儿
    Thread.sleep(2000);
   
    Map<String, Map<String, Object>> mergedResult = job.getJobResult();
   
    String key = mergedResult.keySet().iterator().next();
    String key2 = mergedResult.get(key).keySet().iterator().next();
    Object value = mergedResult.get(key).get(key2);
   
    Assert.assertEquals(mergedJobTask.getResults().get(key).get(key2), value);
     
    defaultAnalysisEngine.releaseResource();
    mixJobBuilder.releaseResource();   
    jobResultMerge.releaseResource();
  }
View Full Code Here

*/
public class DefaultAnalysisEngineTest {

  @Test
  public void testDoAnalysis() throws Exception {
    StatisticsEngine defaultAnalysisEngine = new StatisticsEngine();
    defaultAnalysisEngine.init();
   
    IInputAdaptor fileInputAdaptor =  new FileInputAdaptor();
    IInputAdaptor httpInputAdaptor = new HttpInputAdaptor();
    FileOutputAdaptor fileOutAdaptor = new FileOutputAdaptor();
   
    FileJobExporter fileJobExporter = new FileJobExporter();
    fileJobExporter.setMaxCreateReportWorker(2);
    fileJobExporter.init();
   
    fileOutAdaptor.setJobExporter(fileJobExporter);
   
    JobResultMerger jobResultMerger = new JobResultMerger();
    jobResultMerger.init();
   
    defaultAnalysisEngine.addInputAdaptor(fileInputAdaptor);
    defaultAnalysisEngine.addInputAdaptor(httpInputAdaptor);
    defaultAnalysisEngine.addOutputAdaptor(fileOutAdaptor);
     
    FileJobBuilder jobBuilder = new FileJobBuilder();
    jobBuilder.init();
    Map<String,Job> jobs = jobBuilder.build("jobs-config-engine.properties");
    long time = System.currentTimeMillis();
    for(Job job : jobs.values())
    {
      List<JobTask> tasks = job.getJobTasks();
     
      List<JobTaskResult> taskResults = new ArrayList<JobTaskResult>();
     
      for(JobTask jobtask : tasks)
      {
        taskResults.add(defaultAnalysisEngine.doAnalysis(jobtask));
      }
     
      JobTaskResult jobTaskResult = jobResultMerger.merge(tasks.get(0), taskResults,true,true);
     
      defaultAnalysisEngine.doExport(tasks.get(0), jobTaskResult);
    }
    System.out.println(System.currentTimeMillis() - time);
    fileJobExporter.releaseResource();
    jobResultMerger.releaseResource();
    defaultAnalysisEngine.releaseResource();
   
  }
View Full Code Here

*/
public class DefaultAnalysisEngineTestBig {

  @Test
  public void testDoAnalysis() throws Exception {
    StatisticsEngine defaultAnalysisEngine = new StatisticsEngine();
    defaultAnalysisEngine.init();
   
    IInputAdaptor fileInputAdaptor =  new FileInputAdaptor();
    IInputAdaptor httpInputAdaptor = new HttpInputAdaptor();
    FileOutputAdaptor fileOutAdaptor = new FileOutputAdaptor();
   
    FileJobExporter fileJobExporter = new FileJobExporter();
    fileJobExporter.setMaxCreateReportWorker(2);
    fileJobExporter.init();
   
    fileOutAdaptor.setJobExporter(fileJobExporter);
   
    JobResultMerger jobResultMerger = new JobResultMerger();
    jobResultMerger.init();
   
    defaultAnalysisEngine.addInputAdaptor(fileInputAdaptor);
    defaultAnalysisEngine.addInputAdaptor(httpInputAdaptor);
    defaultAnalysisEngine.addOutputAdaptor(fileOutAdaptor);
     
    FileJobBuilder jobBuilder = new FileJobBuilder();
    Map<String,Job> jobs = jobBuilder.build("jobs-config-engine-big.properties");
   
    for(int i =0 ; i < 100; i++)
    {
   
      for(Job job : jobs.values())
      {
        List<JobTask> tasks = job.getJobTasks();
       
        List<JobTaskResult> taskResults = new ArrayList<JobTaskResult>();
       
        for(JobTask jobtask : tasks)
        {
          taskResults.add(defaultAnalysisEngine.doAnalysis(jobtask));
        }
       
        JobTaskResult jobTaskResult = jobResultMerger.merge(tasks.get(0), taskResults,true,true);
       
        defaultAnalysisEngine.doExport(tasks.get(0), jobTaskResult);
      }
    }

    fileJobExporter.releaseResource();
    jobResultMerger.releaseResource();
    defaultAnalysisEngine.releaseResource();
   
    Thread.sleep(100);
   
  }
View Full Code Here

public class FileJobExporterTest {

 
  @Test
  public void testExportReportJob() throws AnalysisException, UnsupportedEncodingException, IOException {
    StatisticsEngine defaultAnalysisEngine = new StatisticsEngine();
    defaultAnalysisEngine.init();
   
    IInputAdaptor fileInputAdaptor =  new FileInputAdaptor();
    IInputAdaptor httpInputAdaptor = new HttpInputAdaptor();
   
    defaultAnalysisEngine.addInputAdaptor(fileInputAdaptor);
    defaultAnalysisEngine.addInputAdaptor(httpInputAdaptor);
   
    MixJobBuilder mixJobBuilder = new MixJobBuilder();
    FileJobExporter fileJobExporter = new FileJobExporter();
    MasterConfig config = new MasterConfig();
    config.load("master-config.properties");
    fileJobExporter.setConfig(config);
    mixJobBuilder.setConfig(config);
    mixJobBuilder.init();
    fileJobExporter.init();
   
    Map<String, Job> jobs = mixJobBuilder.build();
   
    for(Job job : jobs.values())
    {
      JobTask task = job.getJobTasks().get(0);
     
      job.setJobResult(defaultAnalysisEngine.doAnalysis(task).getResults())
     
      fileJobExporter.exportReport(job, false);
    }
     
    defaultAnalysisEngine.releaseResource();
    mixJobBuilder.releaseResource();
    fileJobExporter.releaseResource();
  }
View Full Code Here

  }

 
  @Test
  public void testExportReportJobTaskJobTaskResult() throws AnalysisException, UnsupportedEncodingException, IOException {
    StatisticsEngine defaultAnalysisEngine = new StatisticsEngine();
    defaultAnalysisEngine.init();
   
    IInputAdaptor fileInputAdaptor =  new FileInputAdaptor();
    IInputAdaptor httpInputAdaptor = new HttpInputAdaptor();
   
    defaultAnalysisEngine.addInputAdaptor(fileInputAdaptor);
    defaultAnalysisEngine.addInputAdaptor(httpInputAdaptor);
   
    MixJobBuilder mixJobBuilder = new MixJobBuilder();
    FileJobExporter fileJobExporter = new FileJobExporter();
    MasterConfig config = new MasterConfig();
    config.load("master-config.properties");
    fileJobExporter.setConfig(config);
    mixJobBuilder.setConfig(config);
    mixJobBuilder.init();
    fileJobExporter.init();
   
    Map<String, Job> jobs = mixJobBuilder.build();
   
    for(Job job : jobs.values())
    {
      JobTask task = job.getJobTasks().get(0);
     
      JobTaskResult jobTaskResult = defaultAnalysisEngine.doAnalysis(task)
     
      fileJobExporter.exportReport(task, jobTaskResult, false);
    }
     
    defaultAnalysisEngine.releaseResource();
    mixJobBuilder.releaseResource();
    fileJobExporter.releaseResource();
  }
View Full Code Here

 
  @Test
  public void testExportEntryDataAndLoadEntryData() throws AnalysisException, UnsupportedEncodingException, IOException, InterruptedException {
   
    // Round-trip test: analyse one task, export the job's entry data to disk,
    // clear the in-memory result, load it back, and assert the reloaded values
    // match what was exported.
    StatisticsEngine defaultAnalysisEngine = new StatisticsEngine();
    defaultAnalysisEngine.init();
   
    IInputAdaptor fileInputAdaptor =  new FileInputAdaptor();
    IInputAdaptor httpInputAdaptor = new HttpInputAdaptor();
   
    defaultAnalysisEngine.addInputAdaptor(fileInputAdaptor);
    defaultAnalysisEngine.addInputAdaptor(httpInputAdaptor);
   
    MixJobBuilder mixJobBuilder = new MixJobBuilder();
    FileJobExporter fileJobExporter = new FileJobExporter();
    MasterConfig config = new MasterConfig();
    config.load("master-config.properties");
    fileJobExporter.setConfig(config);
    mixJobBuilder.setConfig(config);
    mixJobBuilder.init();
    fileJobExporter.init();
   
    // Use the first built job and its first task only.
    Map<String, Job> jobs = mixJobBuilder.build();
    Job job = jobs.values().iterator().next();
   
    JobTask task = job.getJobTasks().get(0);
   
    job.setJobResult(defaultAnalysisEngine.doAnalysis(task).getResults());
   
    fileJobExporter.exportEntryData(job);
   
    // NOTE(review): export appears to happen on worker threads — the sleep
    // waits for the file to be written; confirm against FileJobExporter.
    Thread.sleep(1000);
   
    // Keep a reference to the exported result, then wipe the job so the
    // subsequent loads must repopulate it from disk.
    Map<String, Map<String, Object>> result = job.getJobResult();
    job.setJobResult(null);
   
    fileJobExporter.loadEntryData(job);
    fileJobExporter.loadEntryDataToTmp(job);
   
    // Same reasoning: give the asynchronous load time to complete.
    Thread.sleep(1000);
   
    // Pick an arbitrary (key, key2) entry from the original result as the probe.
    String key = result.keySet().iterator().next();
    String key2 = result.get(key).keySet().iterator().next();
    Object value = result.get(key).get(key2);
   
    // loadEntryData repopulates getJobResult(); loadEntryDataToTmp populates getDiskResult().
    Assert.assertEquals(job.getJobResult().get(key).get(key2), value);
    Assert.assertEquals(job.getDiskResult().get(key).get(key2), value);
   
     
    defaultAnalysisEngine.releaseResource();
    mixJobBuilder.releaseResource();
    fileJobExporter.releaseResource();
   
  }
View Full Code Here

   
  }

  @Test
  public void testAddTaskResultToQueue() throws AnalysisException, UnsupportedEncodingException, IOException, InterruptedException {
    StatisticsEngine defaultAnalysisEngine = new StatisticsEngine();
    defaultAnalysisEngine.init();
   
    IInputAdaptor fileInputAdaptor =  new FileInputAdaptor();
    IInputAdaptor httpInputAdaptor = new HttpInputAdaptor();
   
    defaultAnalysisEngine.addInputAdaptor(fileInputAdaptor);
    defaultAnalysisEngine.addInputAdaptor(httpInputAdaptor);
   
    SendResultsRequestEvent jobResponseEvent = new SendResultsRequestEvent("1234");
 
    JobTask task = jobManager.getJobs().values().iterator().next().getJobTasks().get(0);
   
    JobTaskResult jobTaskResult = defaultAnalysisEngine.doAnalysis(task)
 
    jobResponseEvent.setJobTaskResult(jobTaskResult);
   
    jobManager.addTaskResultToQueue(jobResponseEvent);
   
   
    JobTaskResult jobTaskResult2 = jobManager.getJobTaskResultsQueuePool().get(task.getJobName()).poll();
   
    Assert.assertEquals(jobTaskResult, jobTaskResult2);
   
    SendResultsResponseEvent sendResultsResponseEvent = (SendResultsResponseEvent)tunnel.getSlaveSide().poll();
   
    Assert.assertEquals("success", sendResultsResponseEvent.getResponse());
   
   
    //验证少了一个任务
    GetTaskRequestEvent event = new GetTaskRequestEvent("1234567");
    event.setRequestJobCount(3);
   
    jobManager.getUnDoJobTasks(event);
   
   
    GetTaskResponseEvent eventresp = (GetTaskResponseEvent)tunnel.getSlaveSide().poll();
   
    Assert.assertEquals(event.getSequence(), eventresp.getSequence());
    Assert.assertEquals(2, eventresp.getJobTasks().size());
   
    defaultAnalysisEngine.releaseResource();
  }
View Full Code Here

    private void buildSlave() {
        node = new SlaveNode();
        nodeConfig = new SlaveConfig();
        slaveConnector = new SocketSlaveConnector();
        monitor = new SlaveMonitor();
        statisticsEngine = new StatisticsEngine();
        httpInputAdaptor = new HttpInputAdaptor();
        hdfsInputAdaptor = new HdfsInputAdaptor();
        fileInputAdaptor = new FileInputAdaptor();
        hubInputAdaptor = new HubInputAdaptor();
        fileOutputAdaptor = new FileOutputAdaptor();
View Full Code Here

TOP

Related Classes of com.taobao.top.analysis.statistics.StatisticsEngine

Copyright © 2018 www.massapicom. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.