Package org.apache.hadoop.chukwa.datacollection.agent

Examples of org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent$StatsCollectorTask
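The snippets below are JUnit 3 tests of Chukwa's file-tailing adaptors, excerpted from several test classes. They all rely on a shared fixture that the excerpts only hint at: a ChunkCatcherConnector field named chunks that collects the chunks the adaptors emit, and a makeTestFile helper that writes numbered test records. A minimal sketch of that fixture, reconstructed from the assertions in the tests (the class name and helper signatures here are assumptions, not the verbatim upstream code):

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintWriter;

import junit.framework.TestCase;

import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;

public class FileTailingAdaptorTestBase extends TestCase {
  ChunkCatcherConnector chunks;

  protected void setUp() {
    // Matches the "chunks.start();" tails visible in the excerpts below.
    chunks = new ChunkCatcherConnector();
    chunks.start();
  }

  // Sketch: writes numLines records of the form "<n> abcdefghijklmnopqrstuvwxyz\n",
  // which is the exact content the record assertions below expect.
  File makeTestFile(String name, int numLines) throws IOException {
    File tmpOutput = new File(System.getProperty("test.build.data", "/tmp"), name);
    PrintWriter pw = new PrintWriter(new FileOutputStream(tmpOutput));
    for (int i = 0; i < numLines; ++i) {
      pw.print(i + " abcdefghijklmnopqrstuvwxyz\n");
    }
    pw.close();
    return tmpOutput;
  }

  // Zero-argument overload assumed from testStartAtOffset and
  // testStartAfterOffset, which assert an 80-line file.
  File makeTestFile() throws IOException {
    return makeTestFile("chukwaTest", 80);
  }
}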


      // Remove any adaptor left over from previous run
      ChukwaConfiguration cc = new ChukwaConfiguration();
      cc.set("chukwaAgent.control.port", "0");
      cc.setInt("chukwaAgent.fileTailingAdaptor.maxReadSize", 55);
      ChukwaAgent agent = new ChukwaAgent(cc);
      int portno = agent.getControllerPort();
      while (portno == -1) {
        Thread.sleep(1000);
        portno = agent.getControllerPort();
      }

      // System.out.println("Port number:" + portno);
      ChukwaAgentController cli = new ChukwaAgentController("localhost", portno);
      cli.removeAll();
      // sleep for some time to make sure we don't get chunk from existing
      // streams
      Thread.sleep(5000);
      long adaptorId = agent
          .processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8NewLineEscaped"
              + " BigRecord " + logFile + " 0");
      assertTrue(adaptorId != -1);

      boolean record8Found = false;
      Chunk c = null;
      // Keep reading until record 8 arrives.
      // If the adaptor has stopped, JUnit will fail with a timeout.
      while (!record8Found) {
        c = chunks.waitForAChunk();
        String data = new String(c.getData());
        if (c.getDataType().equals("BigRecord")
            && data.indexOf("8 abcdefghijklmnopqrstuvwxyz") >= 0) {
          record8Found = true;
        }
      }
      agent.getAdaptorList().get(adaptorId).shutdown();
      agent.shutdown();
    } catch (Exception e) {
      Assert.fail("Exception in testBigRecord" + e.getMessage());
    } finally {
      if (f != null) {
        f.delete();
      }
    }
  }
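A note on the control syntax used throughout: each add command has the shape add <adaptor class> <data type> <adaptor-specific parameters> <initial sequence offset>, and for the file-tailing adaptors the parameters are the file to tail, optionally preceded by a starting byte offset. testBigRecord above lowers chukwaAgent.fileTailingAdaptor.maxReadSize to 55 bytes so that the adaptor needs several reads to deliver one logical record, which is what the search for record 8 verifies.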



  public void testStartAtOffset() throws IOException, InterruptedException,
      ChukwaAgent.AlreadyRunningException {
    ChukwaAgent agent = new ChukwaAgent();
    // Remove any adaptor left over from previous run
    ChukwaConfiguration cc = new ChukwaConfiguration();
    int portno = cc.getInt("chukwaAgent.control.port", 9093);
    ChukwaAgentController cli = new ChukwaAgentController("localhost", portno);
    cli.removeAll();
    // sleep for some time to make sure we don't get chunk from existing streams
    Thread.sleep(5000);
    File testFile = makeTestFile();
    int startOffset = 0; // start reading from the beginning of the file
    long adaptorId = agent
        .processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8 "
            + "lines " + startOffset + " " + testFile + " " + startOffset);
    assertTrue(adaptorId != -1);
    System.out.println("getting a chunk...");
    Chunk c = chunks.waitForAChunk();
    System.out.println("got chunk");
    while (!c.getDataType().equals("lines")) {
      c = chunks.waitForAChunk();
    }
    assertTrue(c.getSeqID() == testFile.length() + startOffset);
    System.out.println("RecordOffsets length:" + c.getRecordOffsets().length);
    assertTrue(c.getRecordOffsets().length == 80); // 80 lines in the file.
    int recStart = 0;
    for (int rec = 0; rec < c.getRecordOffsets().length; ++rec) {
      String record = new String(c.getData(), recStart,
          c.getRecordOffsets()[rec] - recStart + 1);
      System.out.println("record " + rec + " was: " + record);
      assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
      recStart = c.getRecordOffsets()[rec] + 1;
    }
    assertTrue(c.getDataType().equals("lines"));
    agent.stopAdaptor(adaptorId, false);
    agent.shutdown();
  }


  public void testStartAfterOffset() throws IOException, InterruptedException,
      ChukwaAgent.AlreadyRunningException {
    ChukwaAgent agent = new ChukwaAgent();
    // Remove any adaptor left over from previous run
    ChukwaConfiguration cc = new ChukwaConfiguration();
    int portno = cc.getInt("chukwaAgent.control.port", 9093);
    ChukwaAgentController cli = new ChukwaAgentController("localhost", portno);
    cli.removeAll();
    // sleep for some time to make sure we don't get chunk from existing streams
    Thread.sleep(5000);
    File testFile = makeTestFile();
    int startOffset = 0;
    long adaptorId = agent
        .processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8 "
            + "lines "
            + startOffset
            + " "
            + testFile
            + " "
            + (startOffset + 29));
    assertTrue(adaptorId != -1);
    System.out.println("getting a chunk...");
    Chunk c = chunks.waitForAChunk();
    System.out.println("got chunk");
    while (!c.getDataType().equals("lines")) {
      c = chunks.waitForAChunk();
    }
    assertTrue(c.getSeqID() == testFile.length() + startOffset);

    assertTrue(c.getRecordOffsets().length == 79);// 80 lines in test file,
                                                  // minus the one we skipped
    int recStart = 0;
    for (int rec = 0; rec < c.getRecordOffsets().length; ++rec) {
      String record = new String(c.getData(), recStart,
          c.getRecordOffsets()[rec] - recStart + 1);
      System.out.println("record " + rec + " was: " + record);
      assertTrue(record.equals((rec + 1) + " abcdefghijklmnopqrstuvwxyz\n"));
      recStart = c.getRecordOffsets()[rec] + 1;
    }
    assertTrue(c.getDataType().equals("lines"));
    agent.stopAdaptor(adaptorId, false);
    agent.shutdown();
  }
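Why 79 records in testStartAfterOffset: each single-digit record "<n> abcdefghijklmnopqrstuvwxyz\n" is 29 bytes long (two bytes for the digit and space, 26 letters, and one newline), so starting the adaptor at startOffset + 29 skips exactly the first record of the 80-line file, and the first delivered record is "1 abcdefghijklmnopqrstuvwxyz\n".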


  public void testRawAdaptor() throws IOException, InterruptedException,
      ChukwaAgent.AlreadyRunningException {

    ChukwaAgent agent = new ChukwaAgent();
    // Remove any adaptor left over from previous run
    ChukwaConfiguration cc = new ChukwaConfiguration();
    int portno = cc.getInt("chukwaAgent.control.port", 9093);
    ChukwaAgentController cli = new ChukwaAgentController("localhost", portno);
    cli.removeAll();
    // sleep for some time to make sure we don't get chunk from existing streams
    Thread.sleep(5000);
    File testFile = makeTestFile("chukwaRawTest", 80);
    long adaptorId = agent
        .processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.FileTailingAdaptor"
            + " raw " + testFile + " 0");
    assertTrue(adaptorId != -1);
    Chunk c = chunks.waitForAChunk();
    while (!c.getDataType().equals("raw")) {
      c = chunks.waitForAChunk();
    }
    assertTrue(c.getDataType().equals("raw"));
    assertTrue(c.getRecordOffsets().length == 1);
    assertTrue(c.getSeqID() == testFile.length());
    agent.stopAdaptor(adaptorId, false);
    agent.shutdown();
  }
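Unlike the character-oriented adaptors, the plain FileTailingAdaptor does no record separation: the whole file arrives as one record, which is why testRawAdaptor expects a single record offset and a sequence ID equal to the file length.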


  public void testCrSepAdaptor() throws IOException, InterruptedException,
      ChukwaAgent.AlreadyRunningException {
    ChukwaAgent agent = new ChukwaAgent();
    File testFile = makeTestFile("chukwaTest", 80);
    long adaptorId = agent
        .processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8"
            + " lines " + testFile + " 0");
    assertTrue(adaptorId != -1);
    System.out.println("getting a chunk...");
    Chunk c = chunks.waitForAChunk();
    System.out.println("got chunk");
    assertTrue(c.getSeqID() == testFile.length());

    assertTrue(c.getRecordOffsets().length == 80);
    int recStart = 0;
    for (int rec = 0; rec < c.getRecordOffsets().length; ++rec) {
      String record = new String(c.getData(), recStart,
          c.getRecordOffsets()[rec] - recStart + 1);
      System.out.println("record " + rec + " was: " + record);
      assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
      recStart = c.getRecordOffsets()[rec] + 1;
    }
    assertTrue(c.getDataType().equals("lines"));
    agent.stopAdaptor(adaptorId, false);
    agent.shutdown();
  }
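The decoding loop in these tests relies on the chunk layout: getRecordOffsets()[rec] is the offset of the last byte of record rec within getData(), so each record is the slice from recStart through that offset inclusive (hence the length of offset - recStart + 1), and recStart then advances one byte past it to the start of the next record.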


  public void testLogRotate() throws IOException, InterruptedException,
      ChukwaAgent.AlreadyRunningException {
    ChukwaAgent agent = new ChukwaAgent();
    // Remove any adaptor left over from previous run
    ChukwaConfiguration cc = new ChukwaConfiguration();
    int portno = cc.getInt("chukwaAgent.control.port", 9093);
    ChukwaAgentController cli = new ChukwaAgentController("localhost", portno);
    cli.removeAll();
    // sleep for some time to make sure we don't get chunk from existing streams
    Thread.sleep(5000);
    File testFile = makeTestFile("chukwaLogRotateTest", 80);
    long adaptorId = agent
        .processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8"
            + " lines " + testFile + " 0");
    assertTrue(adaptorId != -1);
    System.out.println("getting a chunk...");
    Chunk c = chunks.waitForAChunk();
    System.out.println("got chunk");
    while (!c.getDataType().equals("lines")) {
      c = chunks.waitForAChunk();
    }
    assertTrue(c.getSeqID() == testFile.length());
    assertTrue(c.getRecordOffsets().length == 80);
    int recStart = 0;
    for (int rec = 0; rec < c.getRecordOffsets().length; ++rec) {
      String record = new String(c.getData(), recStart,
          c.getRecordOffsets()[rec] - recStart + 1);
      System.out.println("record " + rec + " was: " + record);
      assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
      recStart = c.getRecordOffsets()[rec] + 1;
    }
    assertTrue(c.getDataType().equals("lines"));
    testFile = makeTestFile("chukwaLogRotateTest", 40);
    c = chunks.waitForAChunk();
    System.out.println("got chunk");
    while (!c.getDataType().equals("lines")) {
      c = chunks.waitForAChunk();
    }
    // assertTrue(c.getSeqID() == testFile.length());
    assertTrue(c.getRecordOffsets().length == 40);
    recStart = 0;
    for (int rec = 0; rec < c.getRecordOffsets().length; ++rec) {
      String record = new String(c.getData(), recStart,
          c.getRecordOffsets()[rec] - recStart + 1);
      System.out.println("record " + rec + " was: " + record);
      assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
      recStart = c.getRecordOffsets()[rec] + 1;
    }
    assertTrue(c.getDataType().equals("lines"));
    agent.stopAdaptor(adaptorId, false);
    agent.shutdown();
  }
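testLogRotate simulates rotation by rewriting the tailed file in place: makeTestFile replaces the 80-line file with a 40-line one, and the adaptor is expected to notice the shrunken file and deliver the new contents as a fresh 40-record chunk. The seqID assertion after rotation is left commented out, presumably because the sequence number keeps counting across the rotation rather than matching the new file's length.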

import java.io.File;

import junit.framework.Assert;
import junit.framework.TestCase;

import org.apache.hadoop.chukwa.conf.ChukwaConfiguration;
import org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.FileTailingAdaptor;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;

public class TestFileExpirationPolicy extends TestCase {

  public void testExpiration() {
    ChukwaAgent agent = null;

    try {
      agent = new ChukwaAgent();
      // Remove any adaptor left over from previous run
      ChukwaConfiguration cc = new ChukwaConfiguration();
      int portno = cc.getInt("chukwaAgent.control.port", 9093);
      ChukwaAgentController cli = new ChukwaAgentController("localhost", portno);
      cli.removeAll();
      // sleep for some time to make sure we don't get chunk from existing
      // streams
      Thread.sleep(5000);

      FileTailingAdaptor.GRACEFUL_PERIOD = 30 * 1000;

      long adaptorId = agent
          .processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8NewLineEscaped MyType 0 /myWrongPath"
              + System.currentTimeMillis() + " 0");

      assertTrue(adaptorId != -1);

      assertTrue(agent.getAdaptorList().containsKey(adaptorId));

      Thread.sleep(FileTailingAdaptor.GRACEFUL_PERIOD + 10000);
      assertFalse(agent.getAdaptorList().containsKey(adaptorId));

    } catch (Exception e) {
      Assert.fail("Exception in TestFileExpirationPolicy");
    } finally {
      if (agent != null) {
        agent.shutdown();
      }
    }

  }
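FileTailingAdaptor.GRACEFUL_PERIOD is the window during which a file-tailing adaptor tolerates its target file being missing. Both expiration tests set it to 30 seconds and then wait slightly longer; an adaptor pointed at a path that never existed, or whose file has since been deleted, should unregister itself from the agent's adaptor list once the period elapses, which the containsKey assertions check.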


  public void testExpirationOnFileThatHasBeenDeleted() {
    ChukwaAgent agent = null;
    File testFile = null;
    try {

      File tempDir = new File(System.getProperty("test.build.data", "/tmp"));
      if (!tempDir.exists()) {
        tempDir.mkdirs();
      }
      String logFile = tempDir.getPath() + "/chukwatestExpiration.txt";
      testFile = makeTestFile(logFile, 8000);

      agent = new ChukwaAgent();
      // Remove any adaptor left over from previous run
      ChukwaConfiguration cc = new ChukwaConfiguration();
      int portno = cc.getInt("chukwaAgent.control.port", 9093);
      ChukwaAgentController cli = new ChukwaAgentController("localhost", portno);
      cli.removeAll();
      // sleep for some time to make sure we don't get chunk from existing
      // streams
      Thread.sleep(5000);

      assertTrue(testFile.canRead());

      FileTailingAdaptor.GRACEFUL_PERIOD = 30 * 1000;
      long adaptorId = agent
          .processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8NewLineEscaped MyType 0 "
              + logFile + " 0");

      assertTrue(adaptorId != -1);

      assertTrue(agent.getAdaptorList().containsKey(adaptorId));

      Thread.sleep(10000);
      testFile.delete();

      Thread.sleep(FileTailingAdaptor.GRACEFUL_PERIOD + 10000);
      assertFalse(agent.getAdaptorList().containsKey(adaptorId));
      agent.shutdown();
    } catch (Exception e) {
      Assert.fail("Exception in TestFileExpirationPolicy");
    } finally {
      if (agent != null) {
        agent.shutdown();
      }
    }
  }


  public void testCrSepAdaptor() throws IOException, InterruptedException,
      ChukwaAgent.AlreadyRunningException {
    ChukwaAgent agent = new ChukwaAgent();
    // Remove any adaptor left over from previous run
    ChukwaConfiguration cc = new ChukwaConfiguration();
    int portno = cc.getInt("chukwaAgent.control.port", 9093);
    ChukwaAgentController cli = new ChukwaAgentController("localhost", portno);
    cli.removeAll();
    // sleep for some time to make sure we don't get chunk from existing streams
    Thread.sleep(5000);
    File testFile = makeTestFile("chukwaCrSepTest", 80);
    long adaptorId = agent
        .processCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8"
            + " lines " + testFile + " 0");
    assertTrue(adaptorId != -1);
    System.out.println("getting a chunk...");
    Chunk c = chunks.waitForAChunk();
    System.out.println("got chunk");
    while (!c.getDataType().equals("lines")) {
      c = chunks.waitForAChunk();
    }
    assertTrue(c.getSeqID() == testFile.length());
    assertTrue(c.getRecordOffsets().length == 80);
    int recStart = 0;
    for (int rec = 0; rec < c.getRecordOffsets().length; ++rec) {
      String record = new String(c.getData(), recStart,
          c.getRecordOffsets()[rec] - recStart + 1);
      System.out.println("record " + rec + " was: " + record);
      assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
      recStart = c.getRecordOffsets()[rec] + 1;
    }
    assertTrue(c.getDataType().equals("lines"));
    agent.stopAdaptor(adaptorId, false);
    agent.shutdown();
  }

  @Override
  protected void setUp() throws ChukwaAgent.AlreadyRunningException, InterruptedException {
    Configuration conf = new Configuration();
    conf.set("chukwaAgent.control.port", "0");
    conf.setBoolean("chukwaAgent.checkpoint.enabled", false);
    agent = new ChukwaAgent(conf);
  }
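In this setUp, setting chukwaAgent.control.port to "0" lets the agent bind its control socket to any free port, which is why the first snippet polls getControllerPort() until a real port number is available, and disabling chukwaAgent.checkpoint.enabled prevents adaptor state saved by one test run from being restored in the next.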
