Package: com.cloudera.util

Examples of com.cloudera.util.Benchmark


   * 1M x 100 bytes, 5 times
   */
  @Test
  public void testNewExtractScan1000() throws IOException,
      EventExtractException {
    Benchmark b = new Benchmark("new extract - scan 1000 blocks");

    b.mark("build dataset");
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // 1M x 100 byte messages, 0 is the rand seed
    NoNlSynthSource src = new NoNlSynthSource(1000000, 100, 0);

    src.open();
    Event e = null;
    while ((e = src.next()) != null) {
      out.write("<33>".getBytes());
      out.write(e.getBody());
      out.write('\n');
    }

    b.mark("start parsing dataset");
    int good = 0;
    int bad = 0;
    int lines = 0;

    // We do this test 100 times!
    for (int i = 0; i < 5; i++) {
      DataInputStream in = new DataInputStream(new ByteArrayInputStream(out
          .toByteArray()));
      try {
        byte[] data = new byte[1000];
        while (true) {
          lines++;

          in.readFully(data);
        }
      } catch (EOFException eof) {
        // expected.
      }

    }
    b.mark("complete-good-bad", good, bad, lines);
    b.done();
  }
View Full Code Here


   *
   * 1M x 100 bytes, 5 times
   */
  @Test
  public void testNewExtractScan() throws IOException, EventExtractException {
    Benchmark b = new Benchmark("new extract - scan single byte");

    b.mark("build dataset");
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // 1M x 100 byte messages, 0 is the rand seed
    NoNlSynthSource src = new NoNlSynthSource(1000000, 100, 0);

    src.open();
    Event e = null;
    while ((e = src.next()) != null) {
      out.write("<33>".getBytes());
      out.write(e.getBody());
      out.write('\n');
    }

    b.mark("start parsing dataset");
    int good = 0;
    int bad = 0;
    int lines = 0;

    // We do this test 100 times!
    for (int i = 0; i < 5; i++) {
      DataInputStream in = new DataInputStream(new ByteArrayInputStream(out
          .toByteArray()));
      try {
        while (true) {
          lines++;

          in.readByte();
        }
      } catch (EOFException eof) {
        // expected.
      }

    }
    b.mark("complete-good-bad", good, bad, lines);
    b.done();
  }
View Full Code Here

   * Generates a dataset, puts it into a memory buffer, and the uses the
   * DataInputStream machinery to read through it one parsed record at a time.
   */
  @Test
  public void testNewExtract() throws IOException, EventExtractException {
    Benchmark b = new Benchmark("regex extract");

    b.mark("build dataset");
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // 1M x 100 byte messages, 0 is the rand seed
    NoNlSynthSource src = new NoNlSynthSource(1000000, 100, 0);

    src.open();
    Event e = null;
    while ((e = src.next()) != null) {
      out.write("<33>".getBytes());
      out.write(e.getBody());
      out.write('\n');
    }

    byte[] outbytes = out.toByteArray();
    System.out.println("Outbytes length : " + outbytes.length);
    b.mark("start parsing dataset");
    int good = 0;
    int bad = 0;
    int lines = 0;

    // We do this test 50 times!
    for (int i = 0; i < 5; i++) {
      DataInputStream in = new DataInputStream(new ByteArrayInputStream(
          outbytes));

      Event evt = null;
      while (true) {
        try {
          lines++;
          evt = SyslogWireExtractor.extractEvent(in);
          if (evt == null)
            break;
          good++;
        } catch (Exception eee) {
          bad++;
        }
      }
    }
    b.mark("complete-good-bad", good, bad, lines);
    b.done();
  }
View Full Code Here

*/
public class PerfReportSinks implements ExamplePerfData {

  @Test
  public void testNullSink() throws IOException, InterruptedException {
    Benchmark b = new Benchmark("nullsink");
    b.mark("begin");
    TextFileSource txt = new TextFileSource(HADOOP_DATA[0]);
    txt.open();
    MemorySinkSource mem = new MemorySinkSource();
    mem.open();
    EventUtil.dumpAll(txt, mem);

    b.mark("disk_loaded");

    EventSink nullsnk = new NullSink();
    EventUtil.dumpAll(mem, nullsnk);
    b.mark("nullsink done");

    b.done();
  }
View Full Code Here

    b.done();
  }

  @Test
  public void testCountSink() throws IOException, InterruptedException {
    Benchmark b = new Benchmark("nullsink");
    b.mark("begin");
    TextFileSource txt = new TextFileSource(HADOOP_DATA[0]);
    txt.open();
    MemorySinkSource mem = new MemorySinkSource();
    mem.open();
    EventUtil.dumpAll(txt, mem);

    b.mark("disk_loaded");

    CounterSink snk = new CounterSink("counter");
    EventUtil.dumpAll(mem, snk);
    b.mark(snk.getName() + " done", snk.getCount());

    b.done();
  }
View Full Code Here

    b.done();
  }

  @Test
  public void testHadoopRegexes() throws IOException, InterruptedException {
    Benchmark b = new Benchmark("hadoop_regexes");
    b.mark("begin");
    TextFileSource txt = new TextFileSource(HADOOP_DATA[0]);
    txt.open();
    MemorySinkSource mem = new MemorySinkSource();
    mem.open();
    EventUtil.dumpAll(txt, mem);

    b.mark("disk_loaded");

    SimpleRegexReporterBuilder bld = new SimpleRegexReporterBuilder(
        HADOOP_REGEXES);

    Collection<RegexGroupHistogramSink> sinks = bld.load();
    MultiReporter snk = new MultiReporter("hadoop_regex_sinks", sinks);
    snk.open();
    b.mark("filters_loaded", new File(HADOOP_REGEXES).getName(), sinks.size());

    EventUtil.dumpAll(mem, snk);
    b.mark(snk.getName() + " done");

    b.done();
  }
View Full Code Here

    b.done();
  }

  @Test
  public void testHadoopRegexes11() throws IOException, InterruptedException {
    Benchmark b = new Benchmark("hadoop_regexes");
    b.mark("begin");
    TextFileSource txt = new TextFileSource(HADOOP_DATA[0]);
    txt.open();
    MemorySinkSource mem = new MemorySinkSource();
    mem.open();
    EventUtil.dumpAll(txt, mem);

    b.mark("disk_loaded");

    SimpleRegexReporterBuilder bld = new SimpleRegexReporterBuilder(
        HADOOP_REGEXES_11);

    Collection<RegexGroupHistogramSink> sinks = bld.load();
    MultiReporter snk = new MultiReporter("hadoop_regex_sinks", sinks);
    snk.open();
    b.mark("filters_loaded", new File(HADOOP_REGEXES_11).getName(), sinks
        .size());

    EventUtil.dumpAll(mem, snk);
    b.mark(snk.getName() + " done");

    b.done();
  }
View Full Code Here

*/
public class PerfDiskIO {

  @Test
  public void testWrite() throws IOException, InterruptedException {
    Benchmark b = new Benchmark("seqfile write");
    b.mark("begin");
    MemorySinkSource mem = FlumeBenchmarkHarness.synthInMem();
    b.mark("disk_loaded");

    File tmp = File.createTempFile("test", "tmp");
    tmp.deleteOnExit();
    SeqfileEventSink sink = new SeqfileEventSink(tmp);
    sink.open();
    b.mark("receiver_started");

    EventUtil.dumpAll(mem, sink);

    b.mark("seqfile_disk_write");

    sink.close();
    b.mark("seqfile size", tmp.length());
    b.done();
    mem = null; // allow mem to be freed.

    // //////// second phase using the file written in previous phase.
    Benchmark b2 = new Benchmark("seqfile_disk_read");
    b2.mark("begin");

    SeqfileEventSource seq = new SeqfileEventSource(tmp.getAbsolutePath());
    seq.open();
    MemorySinkSource mem2 = new MemorySinkSource();
    EventUtil.dumpAll(seq, mem2);
    seq.close();
    b2.mark("seqfile_loaded");

    b2.done();
  }
View Full Code Here

   * mem -> AvroEventSink -> AvroEventSource -> NullSink
   */
  @Test
  public void testAvroSend() throws IOException, InterruptedException {

    Benchmark b = new Benchmark("nullsink");
    b.mark("begin");
    MemorySinkSource mem = FlumeBenchmarkHarness.synthInMem();
    b.mark("disk_loaded");

    FlumeConfiguration conf = FlumeConfiguration.get();
    final AvroEventSource tes = new AvroEventSource(conf.getCollectorPort());
    tes.open();
    // need to drain the sink otherwise its queue will fill up with events!
    Thread drain = new Thread("drain") {
      public void run() {
        try {
          EventUtil.dumpAll(tes, new NullSink());
        } catch (Exception e) {
          // TODO Auto-generated catch block
          e.printStackTrace();
        }
      }
    };
    drain.start(); // drain the sink.
    b.mark("receiver_started");

    final AvroEventSink snk = new AvroEventSink("0.0.0.0",
        conf.getCollectorPort());
    snk.open();
    b.mark("sink_started");

    EventUtil.dumpAll(mem, snk);
    b.mark("Avro sink to Avro source done");
    // MB/s = B/us
    b.mark("MB/s", (double) snk.getSentBytes()
        / (double) (b.getLastDelta() / 1000));
    tes.close();
    snk.close();
    drain.interrupt();
    b.done();
  }
View Full Code Here

   * mem -> batch(10) AvroEventSink -> AvroEventSource -> NullSink
   */
  @Test
  public void testAvroBatchSend10() throws IOException, InterruptedException {

    Benchmark b = new Benchmark("nullsink");
    b.mark("begin");
    MemorySinkSource mem = FlumeBenchmarkHarness.synthInMem();
    b.mark("disk_loaded");

    FlumeConfiguration conf = FlumeConfiguration.get();
    final AvroEventSource tes = new AvroEventSource(conf.getCollectorPort());
    tes.open();
    // need to drain the sink otherwise its queue will fill up with events!
    Thread drain = new Thread("drain") {
      public void run() {
        try {
          EventUtil.dumpAll(tes, new NullSink());
        } catch (Exception e) {
          // TODO Auto-generated catch block
          e.printStackTrace();
        }
      }
    };
    drain.start(); // drain the sink.
    b.mark("receiver_started");

    final AvroEventSink tsnk = new AvroEventSink("0.0.0.0",
        conf.getCollectorPort());
    // make size happen first all the time.
    final BatchingDecorator snk = new BatchingDecorator(tsnk, 10, 10000000);
    snk.open();
    b.mark("sink_started");

    EventUtil.dumpAll(mem, snk);
    b.mark("Avro sink to Avro source done");
    // MB/s = B/us
    b.mark("MB/s", (double) tsnk.getSentBytes()
        / (double) (b.getLastDelta() / 1000));

    tes.close();
    snk.close();
    drain.interrupt();
    b.done();
  }
View Full Code Here

TOP

Related Classes of com.cloudera.util.Benchmark

Copyright © 2018 www.massapicom. All rights reserved.
All source code is the property of the respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by ORACLE Inc. Contact coftware#gmail.com.