Package org.apache.hadoop.chukwa

Examples of org.apache.hadoop.chukwa.Chunk
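
Chunk is the basic unit of data in Chukwa's collection pipeline. An adaptor produces Chunks, each carrying a byte payload (getData()) plus metadata: the data type (getDataType()), a sequence ID that in these tests equals the stream offset at the end of the chunk (getSeqID()), and record offsets marking the last byte of each record in the payload (getRecordOffsets()). The excerpts below, drawn largely from the Chukwa test suite, exercise these accessors; several begin mid-statement, so follow the View Full Code Here links for the complete tests.

A pattern that recurs throughout is decoding a Chunk's payload into individual records. A minimal sketch of that loop, assuming a chunk variable obtained from a ChunkQueue as in the tests below:

    // Each entry of getRecordOffsets() is the index of the last byte of a
    // record within getData(), so record i spans [recStart, offsets[i]].
    int recStart = 0;
    int[] offsets = chunk.getRecordOffsets();
    for (int rec = 0; rec < offsets.length; ++rec) {
      String record = new String(chunk.getData(), recStart,
          offsets[rec] - recStart + 1);
      System.out.println("record " + rec + ": " + record);
      recStart = offsets[rec] + 1;
    }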


            + " lines " + testFile + " 0");
    assertNotNull(adaptorId);
    assertEquals(1, agent.adaptorCount());

    System.out.println("getting a chunk...");
    Chunk c = chunks.waitForAChunk();
    System.out.println("got chunk");
    while (!c.getDataType().equals("lines")) {
      c = chunks.waitForAChunk();
    }
    assertTrue(c.getSeqID() == testFile.length());
    assertTrue(c.getRecordOffsets().length == 80);
    int recStart = 0;
    for (int rec = 0; rec < c.getRecordOffsets().length; ++rec) {
      String record = new String(c.getData(), recStart,
          c.getRecordOffsets()[rec] - recStart + 1);
      assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
      recStart = c.getRecordOffsets()[rec] + 1;
    }
    assertTrue(c.getDataType().equals("lines"));
    agent.stopAdaptor(adaptorId, false);
    agent.shutdown();
    Thread.sleep(2000);
  }
View Full Code Here


        System.out.println("buzzed " + i + " times");
     
      assertEquals(0, agent.adaptorCount());
      agent.processAddCommand("add test = filetailer.FileTailingAdaptor raw " +testFile.getCanonicalPath() + " 0");
      assertEquals(1, agent.adaptorCount());
      Chunk c = chunks.waitForAChunk();
      String dat = new String(c.getData());
      assertTrue(dat.startsWith("0 abcdefghijklmnopqrstuvwxyz"));
      assertTrue(dat.endsWith("9 abcdefghijklmnopqrstuvwxyz\n"));
      assertTrue(c.getDataType().equals("raw"));
      if(agent.adaptorCount() > 0)
        agent.stopAdaptor("test", false);
    }
    agent.shutdown();
  }
View Full Code Here
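
Note the "add test = ..." form in the excerpt above: it registers the adaptor under the explicit name test, which is what lets the loop stop it each iteration with agent.stopAdaptor("test", false). The other excerpts use the anonymous form and instead hold on to the adaptor ID that processAddCommand returns.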

       .processAddCommand("add org.apache.hadoop.chukwa.datacollection.adaptor"+
            "filetailer.CharFileTailingAdaptorUTF8 "
            + "lines " + startOffset + " " + testFile + " " + startOffset);
    assertTrue(adaptorId != null);
    System.out.println("getting a chunk...");
    Chunk c = chunks.waitForAChunk();
    System.out.println("got chunk");
    while (!c.getDataType().equals("lines")) {
      c = chunks.waitForAChunk();
    }
    assertTrue(c.getSeqID() == testFile.length() + startOffset);
    System.out.println("RecordOffsets length:" + c.getRecordOffsets().length);
    assertTrue(c.getRecordOffsets().length == 80); // 80 lines in the file.
    int recStart = 0;
    for (int rec = 0; rec < c.getRecordOffsets().length; ++rec) {
      String record = new String(c.getData(), recStart,
          c.getRecordOffsets()[rec] - recStart + 1);
      System.out.println("record " + rec + " was: " + record);
      assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
      recStart = c.getRecordOffsets()[rec] + 1;
    }
    assertTrue(c.getDataType().equals("lines"));
    agent.stopAdaptor(adaptorId, false);
    agent.shutdown();
  }
View Full Code Here

            + testFile
            + " "
            + (startOffset + 29));
    assertTrue(adaptorId != null);
    System.out.println("getting a chunk...");
    Chunk c = chunks.waitForAChunk();
    System.out.println("got chunk");
    while (!c.getDataType().equals("lines")) {
      c = chunks.waitForAChunk();
    }
    assertTrue(c.getSeqID() == testFile.length() + startOffset);

    assertTrue(c.getRecordOffsets().length == 79);// 80 lines in test file,
                                                  // minus the one we skipped
    int recStart = 0;
    for (int rec = 0; rec < c.getRecordOffsets().length; ++rec) {
      String record = new String(c.getData(), recStart,
          c.getRecordOffsets()[rec] - recStart + 1);
      System.out.println("record " + rec + " was: " + record);
      assertTrue(record.equals((rec + 1) + " abcdefghijklmnopqrstuvwxyz\n"));
      recStart = c.getRecordOffsets()[rec] + 1;
    }
    assertTrue(c.getDataType().equals("lines"));
    agent.stopAdaptor(adaptorId, false);
    agent.shutdown();
  }
View Full Code Here

    String adaptorId = agent
        .processAddCommand("add org.apache.hadoop.chukwa.datacollection.adaptor."
            +"filetailer." + name
            + " raw " + testFile + " 0");
    assertNotNull(adaptorId);
    Chunk c = chunks.waitForAChunk(1000);
    assertNotNull(c);
    assertEquals(testFile.length(), c.getData().length);
    assertTrue(c.getDataType().equals("raw"));
    assertTrue(c.getRecordOffsets().length == 1);
    assertTrue(c.getSeqID() == testFile.length());
   
    c = chunks.waitForAChunk(1000);
    assertNull(c);
   
    agent.stopAdaptor(adaptorId, false);
View Full Code Here
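
The excerpt above leans on the timeout variant of waitForAChunk: the whole file arrives as a single chunk, so the second one-second wait returns null. Building on that behavior, a sketch of draining whatever chunks are currently queued (handle is a hypothetical stand-in for the caller's own processing):

    // Keep pulling until a one-second wait yields nothing, which the
    // assertNull above shows is how "no more data" is signaled.
    Chunk next;
    while ((next = chunks.waitForAChunk(1000)) != null) {
      handle(next); // hypothetical processing hook
    }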

   
    public void run() {
      setup();
      try {
        while(sock.isConnected()) {
          Chunk c = sendQ.take();
         
          if(sendRawBytes) {
            byte[] data = c.getData();
            out.writeInt(data.length);
            out.write(data);
          } else {
            c.write(out);
          }
        }
      } catch(IOException e) {
        log.info("lost tee", e);
        synchronized(tees) {
          tees.remove(this);
View Full Code Here
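
In the Tee loop above, each chunk is either serialized whole via Chunk.write(out) or, when sendRawBytes is set, framed as a four-byte length followed by the raw payload. Assuming out is a java.io.DataOutputStream over the socket, a matching reader for the raw-bytes framing on the receiving end might look like this sketch:

    // Hypothetical consumer of the raw-bytes framing: read the
    // big-endian length prefix, then exactly that many payload bytes.
    DataInputStream in = new DataInputStream(sock.getInputStream());
    while (true) {
      int len = in.readInt();
      byte[] payload = new byte[len];
      in.readFully(payload);
      // ... hand payload to the downstream consumer ...
    }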

    out.delete();
    ChukwaDailyRollingFileAppender app = new ChukwaDailyRollingFileAppender(
        new SimpleLayout(), out.getAbsolutePath(), "yyyy-MM-dd");
    app.append(new LoggingEvent("foo", myLogger, System.currentTimeMillis(), Priority.INFO, "foo", null));
    assertEquals(1, agent.adaptorCount());
    Chunk c = chunks.waitForAChunk();
    System.out.println("read a chunk OK");
    String logLine = new String(c.getData());
    assertTrue(logLine.equals("INFO - foo\n"));
    System.out.println(new String(c.getData()));
    } catch(Exception e) {
      e.printStackTrace();
    }
  }
View Full Code Here
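
The appender above is driven programmatically, but ChukwaDailyRollingFileAppender can also be wired in through ordinary log4j configuration, since its constructor (layout, file name, date pattern) mirrors log4j's DailyRollingFileAppender. A sketch, assuming the class lives at org.apache.hadoop.chukwa.inputtools.log4j.ChukwaDailyRollingFileAppender and accepts the standard DailyRollingFileAppender options:

    # hypothetical log4j.properties fragment
    log4j.rootLogger=INFO, chukwa
    log4j.appender.chukwa=org.apache.hadoop.chukwa.inputtools.log4j.ChukwaDailyRollingFileAppender
    log4j.appender.chukwa.File=/tmp/chukwa-test.log
    log4j.appender.chukwa.DatePattern=yyyy-MM-dd
    log4j.appender.chukwa.layout=org.apache.log4j.SimpleLayout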

  public void testWithPs() throws ChukwaAgent.AlreadyRunningException {
    try {
      ChukwaAgent  agent = new ChukwaAgent();
      agent.processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.ExecAdaptor ps ps aux 0");
 
      Chunk c = chunks.waitForAChunk();
      System.out.println(new String(c.getData()));
    } catch(InterruptedException e) {
      // swallowed: the test only needs to exercise the adaptor once
    }
  }
View Full Code Here
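
The add commands throughout these excerpts share one general shape:

    add [name =] <adaptor class> <data type> [adaptor-specific parameters] <initial offset>

Read that way, the ExecAdaptor line above uses ps as the data type, ps aux as the command to run, and 0 as the starting offset, while the file-tailing excerpts end with a file name and offset instead.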

        this.wait();
      }
     
      int i = 0;
      while(!queue.isEmpty() && (i++ < maxCount)) {
        Chunk e = this.queue.remove();
        dataSize -= e.getData().length;
        events.add(e);
      }
      this.notifyAll();
    }
View Full Code Here
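
The excerpt above is the consumer side of a memory-bounded chunk queue: it pops up to maxCount chunks, decrements the running dataSize tally, and calls notifyAll() so blocked writers can resume. A sketch of the matching producer, assuming a hypothetical maxDataSize bound alongside the queue and dataSize fields visible above:

    // Block until the chunk fits under the memory bound, mirroring the
    // wait()/notifyAll() handshake in the consumer above.
    public synchronized void add(Chunk chunk) throws InterruptedException {
      while (dataSize + chunk.getData().length > maxDataSize) { // maxDataSize is assumed
        this.wait();
      }
      queue.add(chunk);
      dataSize += chunk.getData().length;
      this.notifyAll();
    }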

    int startOffset = 50;
    agent.processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8 " +
         "lines "+ startOffset+ " " + testFile + " " + startOffset);
    assertTrue(agent.adaptorCount() == 1);
    System.out.println("getting a chunk...");
    Chunk c = chunks.waitForAChunk();
    System.out.println("got chunk");
    assertTrue(c.getSeqID() == testFile.length() + startOffset);   
   
    assertTrue(c.getRecordOffsets().length == 80);//80 lines in test file
    int recStart = 0;
    for(int rec = 0 ; rec < c.getRecordOffsets().length; ++rec) {
      String record = new String(c.getData(), recStart, c.getRecordOffsets()[rec] - recStart+1);
      System.out.println("record "+ rec+ " was: " + record);
      assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
      recStart = c.getRecordOffsets()[rec] +1;
    }
    assertTrue(c.getDataType().equals("lines"));   
    agent.shutdown();
  }
View Full Code Here
