Package org.apache.hadoop.chukwa.extraction.demux.processor

Examples of org.apache.hadoop.chukwa.extraction.demux.processor.ChukwaOutputCollector


    public void map(ChukwaArchiveKey key, ChunkImpl chunk,
        OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter)
        throws IOException {

      ChukwaOutputCollector chukwaOutputCollector = new ChukwaOutputCollector(
          "DemuxMapOutput", output, reporter);
      try {
        long duration = System.currentTimeMillis();
        if (log.isDebugEnabled()) {
          log.debug("Entry: [" + chunk.getData() + "] EventType: ["
View Full Code Here


    }

    public void reduce(ChukwaRecordKey key, Iterator<ChukwaRecord> values,
        OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter)
        throws IOException {
      ChukwaOutputCollector chukwaOutputCollector = new ChukwaOutputCollector(
          "DemuxReduceOutput", output, reporter);
      try {
        long duration = System.currentTimeMillis();
        reporter.incrCounter("DemuxReduceInput", "total distinct keys", 1);
        reporter.incrCounter("DemuxReduceInput", key.getReduceType()
View Full Code Here

      keystr = keystr.trim();
      ChukwaRecord cr = new ChukwaRecord();
     
      for (int i = 0; i < NON_COUNTER_KEYS.length; i++) noncounters.add(NON_COUNTER_KEYS[i]);
     
      ChukwaOutputCollector coc = new ChukwaOutputCollector("SALSA_COMPLETE", output, reporter);

      int itemcount = 0;
      try {
        while (values.hasNext()) {
          itemcount++;
          tmpent = values.next();
          ents.add(tmpent.clone());
        }
      } catch (CloneNotSupportedException e) {
        // do nothing
      }

      log.debug("In reduce [Key " + keystr + "] (" + itemcount + " vals)");
     
      if (itemcount == 2) { // i.e. we have both start and end events

        if (ents.get(0).state_type.val == StateType.STATE_START &&
            ents.get(1).state_type.val == StateType.STATE_END)
        {
          start_rec = ents.get(0); end_rec = ents.get(1);
        } else if  (ents.get(1).state_type.val == StateType.STATE_START &&
                   ents.get(0).state_type.val == StateType.STATE_END)
        {
          start_rec = ents.get(1); end_rec = ents.get(0);
        } else {
          log.warn("In reduce [Key " + keystr + "] Invalid combination of state types: number of states: "+itemcount+".");
          // error handling?
        }
           
        cr.add(new String("STATE_NAME"),start_rec.state_name);
        cr.add(new String("STATE_UNIQ_ID"),start_rec.getUniqueID());
        cr.add(new String("TIMESTAMP"),start_rec.timestamp);
        cr.add(new String("TIME_START"),start_rec.time_start);
        cr.add(new String("TIME_END"),end_rec.time_end);
        cr.add(new String("TIME_START_MILLIS"),start_rec.time_start.substring(start_rec.time_start.length()-3));
        cr.add(new String("TIME_END_MILLIS"),end_rec.time_end.substring(end_rec.time_end.length()-3));
        cr.add(new String("HOST"),start_rec.host_exec);
        cr.add(new String("HOST_OTHER"),start_rec.host_other);
        cr.add(new String("JOB_ID"),start_rec.job_id);
        cr.add(new String("TASK_ID"),start_rec.getFriendlyID());

        Set<String> treemapkeys = end_rec.add_info.keySet();
        Iterator<String> keyIter = treemapkeys.iterator();
       
        for (int i = 0; i < treemapkeys.size(); i++) {
          assert(keyIter.hasNext());
          String currkey = keyIter.next();
          if (currkey != null &&
              !noncounters.contains(currkey)) {
            cr.add(new String("COUNTER_" + currkey), end_rec.add_info.get(currkey))
          } else if (currkey != null && noncounters.contains(currkey)) {
            cr.add(new String(currkey), end_rec.add_info.get(currkey));       
          }
        }
        assert(!keyIter.hasNext());
        cr.setTime(Long.parseLong(start_rec.timestamp));
       
        newkey = null;
        newkey = new String(start_rec.time_orig_epoch +
          SEP + start_rec.getUniqueID() + SEP + start_rec.time_orig);

        log.info("Key ["+newkey+"] Task ["+start_rec.getUniqueID()+"] Job ["+start_rec.job_id+"] Friendly ["+start_rec.getFriendlyID()+"]");

        addStitchingFields(cr);
        log.debug(cr);
        coc.collect(new ChukwaRecordKey(key.getReduceType(), newkey), cr);
       
      } else if (itemcount == 1) {
        // check that we have only the start; if we have only the end, dump it
        // otherwise change the reducetype to get record written to file for
        // incomplete entries
View Full Code Here

    public void map(ChukwaArchiveKey key, ChunkImpl chunk,
        OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter)
        throws IOException {

      ChukwaOutputCollector chukwaOutputCollector = new ChukwaOutputCollector(
          "DemuxMapOutput", output, reporter);
      try {
        long duration = System.currentTimeMillis();
        if (log.isDebugEnabled()) {
          log.debug("Entry: [" + chunk.getData() + "] EventType: ["
View Full Code Here

  public static class ReduceClass extends MapReduceBase implements
      Reducer<ChukwaRecordKey, ChukwaRecord, ChukwaRecordKey, ChukwaRecord> {
    public void reduce(ChukwaRecordKey key, Iterator<ChukwaRecord> values,
        OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter)
        throws IOException {
      ChukwaOutputCollector chukwaOutputCollector = new ChukwaOutputCollector(
          "DemuxReduceOutput", output, reporter);
      try {
        long duration = System.currentTimeMillis();
        reporter.incrCounter("DemuxReduceInput", "total distinct keys", 1);
        reporter.incrCounter("DemuxReduceInput", key.getReduceType()
View Full Code Here

    public void map(ChukwaArchiveKey key, ChunkImpl chunk,
        OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter)
        throws IOException {

      ChukwaOutputCollector chukwaOutputCollector = new ChukwaOutputCollector(
          "DemuxMapOutput", output, reporter);
      try {
        long duration = System.currentTimeMillis();
        if (log.isDebugEnabled()) {
          log.debug("Entry: [" + chunk.getData() + "] EventType: ["
View Full Code Here

  public static class ReduceClass extends MapReduceBase implements
      Reducer<ChukwaRecordKey, ChukwaRecord, ChukwaRecordKey, ChukwaRecord> {
    public void reduce(ChukwaRecordKey key, Iterator<ChukwaRecord> values,
        OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter)
        throws IOException {
      ChukwaOutputCollector chukwaOutputCollector = new ChukwaOutputCollector(
          "DemuxReduceOutput", output, reporter);
      try {
        long duration = System.currentTimeMillis();
        reporter.incrCounter("DemuxReduceInput", "total distinct keys", 1);
        reporter.incrCounter("DemuxReduceInput", key.getReduceType()
View Full Code Here

    public void map(ChukwaArchiveKey key, ChunkImpl chunk,
                    OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter)
            throws IOException {

      ChukwaOutputCollector chukwaOutputCollector = new ChukwaOutputCollector(
              "DemuxMapOutput", output, reporter);
      try {
        long duration = System.currentTimeMillis();
        if (log.isDebugEnabled()) {
          log.debug("Entry: [" + chunk.getData() + "] EventType: ["
View Full Code Here

    }

    public void reduce(ChukwaRecordKey key, Iterator<ChukwaRecord> values,
                       OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter)
            throws IOException {
      ChukwaOutputCollector chukwaOutputCollector = new ChukwaOutputCollector(
              "DemuxReduceOutput", output, reporter);
      try {
        long duration = System.currentTimeMillis();
        reporter.incrCounter("DemuxReduceInput", "total distinct keys", 1);
        reporter.incrCounter("DemuxReduceInput", key.getReduceType()
View Full Code Here

    public void map(ChukwaArchiveKey key, ChunkImpl chunk,
        OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter)
        throws IOException {

      ChukwaOutputCollector chukwaOutputCollector = new ChukwaOutputCollector(
          "DemuxMapOutput", output, reporter);
      try {
        long duration = System.currentTimeMillis();
        if (log.isDebugEnabled()) {
          log.debug("Entry: [" + chunk.getData() + "] EventType: ["
View Full Code Here

TOP

Related Classes of org.apache.hadoop.chukwa.extraction.demux.processor.ChukwaOutputCollector

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.