Package: org.apache.hadoop.chukwa

Examples of org.apache.hadoop.chukwa.Chunk


   
    public void run() {
      setup();
      try {
        while(sock.isConnected()) {
          Chunk c = sendQ.take();
         
          if(fmt == DataFormat.Raw) {
            byte[] data = c.getData();
            out.writeInt(data.length);
            out.write(data);
          } else if(fmt == DataFormat.Writable)
            c.write(out);
          else {
            byte[] data = c.getData();
            byte[] header = (c.getSource()+ " " + c.getDataType() + " " + c.getStreamName()+ " "
                c.getSeqID()+"\n").getBytes();
            out.writeInt(data.length+ header.length);
            out.write(header);
            out.write(data);
          }
        }
View Full Code Here


  public void testWithPs() throws InterruptedException {
    ChunkCatcherConnector chunks = new ChunkCatcherConnector();
    chunks.start();
    String psAgentID = agent.processAddCommand(
        "add exec= org.apache.hadoop.chukwa.datacollection.adaptor.ExecAdaptor ps 500 ps aux 0");
    Chunk c = chunks.waitForAChunk();
    System.out.println(new String(c.getData()));
    assertNotNull(psAgentID);
  }
View Full Code Here

   *
   * @throws UnsupportedEncodingException
   */
  @Test
  public void testDontBreakLines() throws UnsupportedEncodingException {
    Chunk c = chunks.waitForAChunk(5000);
    String data = new String(c.getData(), "UTF-8");

    String[] lines = data.split("\\r?\\n");

    // Check that length of the last line is the same as the
    // one of the first line. Otherwise, it means the last
View Full Code Here

   * @throws UnsupportedEncodingException
   */
  @Test
  public void testSecondChunkDataFollowsFirstChunkData()
      throws UnsupportedEncodingException {
    Chunk c = chunks.waitForAChunk(5000);
    String data = new String(c.getData(), "UTF-8");
    String[] lines1 = data.split("\\r?\\n");

    c = chunks.waitForAChunk(5000);
    data = new String(c.getData(), "UTF-8");
    String[] lines2 = data.split("\\r?\\n");

    int numLastLineChunk1 = (int) (lines1[lines1.length - 1].charAt(0));
    int numLastLineChunk2 = (int) (lines2[0].charAt(0));

View Full Code Here

   * @throws UnsupportedEncodingException
   */
  @Test
  public void testOnlyOneSetRecordOffset()
      throws UnsupportedEncodingException {
    Chunk c = chunks.waitForAChunk(5000);
    String data = new String(c.getData(), "UTF-8");
    String[] lines = data.split("\\r?\\n");

    // Check that we have more than two lines
    assertTrue(lines.length > 2);

    int[] offsets_i = c.getRecordOffsets();

    // Check that we only have one offset
    assertEquals(1, offsets_i.length);
  }
View Full Code Here

            + " lines " + testFile + " 0");
    assertNotNull(adaptorId);
    assertEquals(1, agent.adaptorCount());

    System.out.println("getting a chunk...");
    Chunk c = chunks.waitForAChunk();
    System.out.println("got chunk");
    while (!c.getDataType().equals("lines")) {
      c = chunks.waitForAChunk();
    }
    assertTrue(c.getSeqID() == testFile.length());
    assertTrue(c.getRecordOffsets().length == 80);
    int recStart = 0;
    for (int rec = 0; rec < c.getRecordOffsets().length; ++rec) {
      String record = new String(c.getData(), recStart,
          c.getRecordOffsets()[rec] - recStart + 1);
      assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
      recStart = c.getRecordOffsets()[rec] + 1;
    }
    assertTrue(c.getDataType().equals("lines"));
    agent.stopAdaptor(adaptorId, false);
    agent.shutdown();
    Thread.sleep(2000);
  }
View Full Code Here

        System.out.println("buzzed " + i + " times");
     
      assertEquals(0, agent.adaptorCount());
      agent.processAddCommand("add adaptor_test = filetailer.FileTailingAdaptor raw " +testFile.getCanonicalPath() + " 0");
      assertEquals(1, agent.adaptorCount());
      Chunk c = chunks.waitForAChunk();
      String dat = new String(c.getData());
      assertTrue(dat.startsWith("0 abcdefghijklmnopqrstuvwxyz"));
      assertTrue(dat.endsWith("9 abcdefghijklmnopqrstuvwxyz\n"));
      assertTrue(c.getDataType().equals("raw"));
      if(agent.adaptorCount() > 0)
        agent.stopAdaptor("adaptor_test", false);
    }
    agent.shutdown();
  }
View Full Code Here

    Thread.sleep(500);
    long initializedSlurpTimeValue = fta.lastSlurpTime;
    assertTrue( initializedSlurpTimeValue > startTime); // initialized to current time
   
    makeTestFile("fooSlurp", 2,baseDir);
    Chunk c = chunks.waitForAChunk();

    Thread.sleep(2000);
    // lastSlurpTime has been updated because a slurp was done
    long secondSlurpTimeValue = fta.lastSlurpTime;
    assertTrue( secondSlurpTimeValue > initializedSlurpTimeValue);
    assertEquals( fta.fileReadOffset, c.getData().length);
    assertEquals( fta.fileReadOffset, fta.reader.length());
   
    Thread.sleep(2000);
    // ensure we don't try to slurp if file is not updated
    assertEquals( fta.lastSlurpTime, secondSlurpTimeValue);
View Full Code Here

  public void doTest(Date date, String recordData) {
    ChunkBuilder cb = new ChunkBuilder();
    cb.addRecord(recordData.getBytes());

    Chunk chunk = cb.getChunk();
    chunk.setDataType(DATA_TYPE);
    chunk.setSource(DATA_SOURCE);

    ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord> output =
            new ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord>();

    TsProcessor p = new TsProcessor();
View Full Code Here

  public void testParseIllegalRegex() {
    jobConf.set(TsProcessor.DEFAULT_TIME_REGEX, "(");

    ChunkBuilder cb = new ChunkBuilder();
    cb.addRecord("2012-10-25 00:18:44,818 some sample record data".getBytes());
    Chunk chunk = cb.getChunk();

    TsProcessor p = new TsProcessor();
    p.reset(chunk);

    ChukwaTestOutputCollector<ChukwaRecordKey, ChukwaRecord> output =
View Full Code Here

TOP

Related Classes of org.apache.hadoop.chukwa.Chunk

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact: coftware#gmail.com.