Package org.apache.hadoop.chukwa.extraction.engine

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord
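
A minimal sketch of the ChukwaRecord/ChukwaRecordKey API that all of the snippets below share. The class name, host name, and key layout here are illustrative only; the calls themselves (add, setTime, getValue, setKey, setReduceType) are the ones the snippets use.

    import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
    import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
    import org.apache.hadoop.chukwa.extraction.engine.Record;

    public class ChukwaRecordSketch {
      public static void main(String[] args) {
        long ts = System.currentTimeMillis();

        // A ChukwaRecord is a timestamped bag of string field/value pairs.
        ChukwaRecord record = new ChukwaRecord();
        record.setTime(ts);
        record.add("csource", "host01.example.com"); // hypothetical host name
        record.add(Record.sourceField, "host01.example.com");

        // The key carries a composite sort key plus the reduce type that
        // routes the record to a demux reducer.
        ChukwaRecordKey key = new ChukwaRecordKey();
        key.setReduceType("SysLog");
        key.setKey(ts + "/host01.example.com/" + ts);

        System.out.println(record.getValue("csource")); // read a field back
      }
    }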


    long currentTimeMillis = System.currentTimeMillis();
    boolean isSuccessful = true;
    String recordType = null;

    ChukwaRecordKey key = new ChukwaRecordKey();
    ChukwaRecord record = new ChukwaRecord();
    try {
      int batch = 0;
      while (reader.next(key, record)) {
        String sqlTime = DatabaseWriter.formatTimeStamp(record.getTime());
        log.debug("Timestamp: " + record.getTime());
        log.debug("DataType: " + key.getReduceType());

        String[] fields = record.getFields();
        String table = null;
        String[] priKeys = null;
        HashMap<String, HashMap<String, String>> hashReport = new HashMap<String, HashMap<String, String>>();
        StringBuilder normKey = new StringBuilder();
        String node = record.getValue("csource");
        recordType = key.getReduceType().toLowerCase();
        String dbKey = "report.db.name." + recordType;
        if (dbTables.containsKey(dbKey)) {
          String[] tmp = mdlConfig.findTableName(mdlConfig.get(dbKey),
              record.getTime(), record.getTime());
          table = tmp[0];
        } else {
          log.debug(dbKey + " does not exist.");
          continue;
        }
        log.debug("table name:" + table);
        try {
          priKeys = mdlConfig.get("report.db.primary.key." + recordType)
              .split(",");
        } catch (Exception nullException) {
          // No primary key is configured for this record type; proceed
          // without one.
        }
        for (String field : fields) {
          String keyName = escape(field.toLowerCase(), newSpace);
          String keyValue = escape(record.getValue(field).toLowerCase(),
              newSpace);
          StringBuilder buildKey = new StringBuilder();
          buildKey.append("normalize.");
          buildKey.append(recordType);
          buildKey.append(".");
          buildKey.append(keyName);
          if (normalize.containsKey(buildKey.toString())) {
            if (normKey.length() > 0) {
              normKey.append(".");
            }
            normKey.append(keyName);
            normKey.append(".");
            normKey.append(keyValue);
          }
          StringBuilder normalizedKey = new StringBuilder();
          normalizedKey.append("metric.");
          normalizedKey.append(recordType);
          normalizedKey.append(".");
          normalizedKey.append(normKey);
          HashMap<String, String> tmpHash = hashReport.get(node);
          if (tmpHash == null) {
            tmpHash = new HashMap<String, String>();
            hashReport.put(node, tmpHash);
          }
          tmpHash.put(normalizedKey.toString(), keyValue);
        }
        for (String field : fields) {
          String valueName = escape(field.toLowerCase(), newSpace);
          String valueValue = escape(record.getValue(field).toLowerCase(),
              newSpace);
          StringBuilder buildKey = new StringBuilder();
          buildKey.append("metric.");
          buildKey.append(recordType);
          buildKey.append(".");
          // ...
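
The loop above builds two families of configuration-driven string keys: a field contributes to the accumulated normalized key only when a normalize.<recordType>.<field> property exists, and each value is then filed under metric.<recordType>.<normKey>. A worked illustration with invented values:

    // Illustrative values only; not taken from a real MDL configuration.
    String recordType = "systemmetrics";
    String keyName = "host";
    String keyValue = "h1";
    // Property looked up in the normalize.* configuration:
    //   "normalize." + recordType + "." + keyName -> "normalize.systemmetrics.host"
    // Accumulated when the property exists:
    //   normKey = keyName + "." + keyValue        -> "host.h1"
    // Per-field metric key the value is stored under:
    //   "metric." + recordType + "." + normKey    -> "metric.systemmetrics.host.h1"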


      OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter)
      throws Throwable {
    try {
      String dStr = recordEntry.substring(0, 23);
      Date d = sdf.parse(dStr);
      ChukwaRecord record = new ChukwaRecord();
      this.buildGenericRecord(record, recordEntry, d.getTime(),
          chunk.getDataType());
      output.collect(key, record);
    } catch (ParseException e) {
      log.warn("Unable to parse the date in DefaultProcessor [" + recordEntry
View Full Code Here

      convertDate.set(Calendar.YEAR, year);
      if(convertDate.getTimeInMillis() > Calendar.getInstance().getTimeInMillis()) {
        convertDate.set(Calendar.YEAR, year - 1);
      }

      ChukwaRecord record = new ChukwaRecord();
      buildGenericRecord(record, recordEntry, convertDate.getTimeInMillis(),
          "SysLog");
      output.collect(key, record);
    } catch (ParseException e) {
      e.printStackTrace();
      // ...

      String body = recordEntry.substring(idx + 1);
      body = body.replaceAll("\n", ""); // keep the result; String is immutable
      Date d = sdf.parse(dStr);
      String[] kvpairs = body.split(", ");

      ChukwaRecord record = new ChukwaRecord();
      boolean containRecord = false;
      for (String kvpair : kvpairs) {
        int eq = kvpair.indexOf('=');
        if (eq >= 0) {
          // Split on the first '=' only, so values may themselves contain '='.
          record.add(kvpair.substring(0, eq), kvpair.substring(eq + 1));
          containRecord = true;
        }
      }
      if (record.containsField("Machine")) {
        buildGenericRecord(record, null, d.getTime(), "HodMachine");
      } else {
        buildGenericRecord(record, null, d.getTime(), "HodJob");
      }
      if (containRecord) {
        output.collect(key, record);
      }
      // ...

  // [MAP-REDUCE_FRAMEWORK_REDUCE_OUTPUT_RECORDS] :739000

  @Override
  public void process(Reader reader) throws DBException {
    ChukwaRecordKey key = new ChukwaRecordKey();
    ChukwaRecord record = new ChukwaRecord();
    try {
      StringBuilder sb = new StringBuilder();
      while (reader.next(key, record)) {

        sb.append("insert into MRJobCounters ");
        for (String field : fields) {
          sb.append(" set ").append(field).append(" = ").append(
              record.getValue(field)).append(", ");
        }
        sb.append(" set timestamp =").append(record.getTime()).append(";\n");
      }
      System.out.println(sb.toString());
    } catch (Exception e) {
      log.error("Unable to insert data into database" + e.getMessage());
      e.printStackTrace();
View Full Code Here
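
The bracketed comment above that method shows the raw counter format this processor consumes: [COUNTER_NAME] :value. A hedged sketch of splitting such a line into a field/value pair; the regex and the parseCounterLine helper are illustrative, not part of the Chukwa API:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    class CounterLineSketch {
      private static final Pattern COUNTER =
          Pattern.compile("\\[([A-Z0-9_-]+)\\]\\s*:(\\d+)");

      // "[MAP-REDUCE_FRAMEWORK_REDUCE_OUTPUT_RECORDS] :739000"
      //   -> { "MAP-REDUCE_FRAMEWORK_REDUCE_OUTPUT_RECORDS", "739000" }
      static String[] parseCounterLine(String line) {
        Matcher m = COUNTER.matcher(line);
        return m.find() ? new String[] { m.group(1), m.group(2) } : null;
      }
    }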

      OutputCollector<ChukwaRecordKey, ChukwaRecord> output, Reporter reporter) {
    try {
      reporter.incrCounter("DemuxError", "count", 1);
      reporter.incrCounter("DemuxError", chunk.getDataType() + "Count", 1);

      ChukwaRecord record = new ChukwaRecord();
      long ts = System.currentTimeMillis();
      // Bucket the key's leading timestamp by the hour.
      Calendar calendar = Calendar.getInstance();
      calendar.setTimeInMillis(ts);
      calendar.set(Calendar.MINUTE, 0);
      calendar.set(Calendar.SECOND, 0);
      calendar.set(Calendar.MILLISECOND, 0);
      ChukwaRecordKey key = new ChukwaRecordKey();
      key.setKey("" + calendar.getTimeInMillis() + "/" + chunk.getDataType()
          + "/" + chunk.getSource() + "/" + ts);
      key.setReduceType(chunk.getDataType() + "InError");

      record.setTime(ts);

      record.add(Record.tagsField, chunk.getTags());
      record.add(Record.sourceField, chunk.getSource());
      record.add(Record.applicationField, chunk.getApplication());

      DataOutputBuffer ob = new DataOutputBuffer(
          chunk.getSerializedSizeEstimate());
      chunk.write(ob);
      // Only the first getLength() bytes of the backing array are valid data.
      record.add(Record.chunkDataField,
          new String(ob.getData(), 0, ob.getLength()));
      record.add(Record.chunkExceptionField,
          ExceptionUtil.getStackTrace(throwable));
      output.collect(key, record);

      return record;
    } catch (Throwable e) {
      // ...

      }

      // Data rows: one df output line per record
      for (int i = 1; i < lines.length; i++) {
        String[] values = lines[i].split("[\\s]++");
        key = new ChukwaRecordKey();
        ChukwaRecord record = new ChukwaRecord();
        this.buildGenericRecord(record, null, d.getTime(), "Df");

        record.add(headerCols[0], values[0]);
        record.add(headerCols[1], values[1]);
        record.add(headerCols[2], values[2]);
        record.add(headerCols[3], values[3]);
        record.add(headerCols[4], values[4]
            .substring(0, values[4].length() - 1)); // Remove %
        record.add(headerCols[5], values[5]);

        output.collect(key, record);
      }

      // log.info("DFProcessor output 1 DF record");
      // ...

      }
      line.setLogType("JobData");
    }
   
    key = new ChukwaRecordKey();
    ChukwaRecord record = new ChukwaRecord();
    this.buildGenericRecord(record, null, -1L, line.getLogType());
   
    for (Entry<String, String> entry : line.entrySet()) {
      record.add(entry.getKey(), entry.getValue());
    }
   
    for(Entry<String, Long> entry : line.getCounterHash().flat().entrySet()) {
      record.add(entry.getKey(), entry.getValue().toString());
    }
   
    long timestamp = line.getTimestamp();
    record.setTime(timestamp);
    key.setKey(getKey(timestamp, line.getJobId()));
    output.collect(key, record);
  }

      // log.info("record [" + recordEntry + "] body [" + body +"]");
      Date d = sdf.parse(dStr);

      JSONObject json = new JSONObject(body);

      ChukwaRecord record = new ChukwaRecord();
      String datasource = null;
      String recordName = null;

      Iterator<String> ki = json.keys();
      while (ki.hasNext()) {
        String keyName = ki.next();
        if (chukwaTimestampField.equals(keyName)) {
          d = new Date(json.getLong(keyName));
          Calendar cal = Calendar.getInstance();
          cal.setTimeInMillis(d.getTime());
          cal.set(Calendar.SECOND, 0);
          cal.set(Calendar.MILLISECOND, 0);
          d.setTime(cal.getTimeInMillis());
        } else if (contextNameField.equals(keyName)) {
          datasource = "Hadoop_" + json.getString(keyName);
        } else if (recordNameField.equals(keyName)) {
          recordName = json.getString(keyName);
          record.add(keyName, json.getString(keyName));
        } else {
          record.add(keyName, json.getString(keyName));
        }
      }

      datasource = datasource + "_" + recordName;
      buildGenericRecord(record, null, d.getTime(), datasource);
      // ...
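
A hedged illustration of what that JSON walk produces. The concrete key names below assume chukwaTimestampField, contextNameField, and recordNameField resolve to "timestamp", "contextName", and "recordName"; the sample values are invented.

    // Hypothetical input chunk body:
    //   {"timestamp": 1226000000000, "contextName": "dfs",
    //    "recordName": "FSNamesystem", "FilesTotal": "42"}
    // Resulting state after the while loop:
    //   d          -> 1226000000000 rounded down to the whole minute
    //   datasource -> "Hadoop_dfs", then "Hadoop_dfs_FSNamesystem"
    //   record     -> fields recordName=FSNamesystem and FilesTotal=42
    //   (the timestamp and contextName keys are consumed, not stored)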

      }
      kvMatcher.reset(recordEntry);
      if (!kvMatcher.find()) {
        throw new IOException("Failed to find record");
      }
      ChukwaRecord rec = new ChukwaRecord();
      do {
        rec.add(kvMatcher.group(1), kvMatcher.group(2));
      } while (kvMatcher.find());
      Locality loc = getLocality(rec.getValue("src"), rec.getValue("dest"));
      rec.add("locality", loc.getLabel());

      // Round the record's own time down to the whole minute...
      calendar.setTimeInMillis(ms);
      calendar.set(Calendar.SECOND, 0);
      calendar.set(Calendar.MILLISECOND, 0);
      ms = calendar.getTimeInMillis();
      // ...then zero the minutes as well, so the key buckets by the hour.
      calendar.set(Calendar.MINUTE, 0);
      key.setKey(calendar.getTimeInMillis() + "/" + loc.getLabel() + "/" +
                 rec.getValue("op").toLowerCase() + "/" + ms);
      key.setReduceType("ClientTrace");
      rec.setTime(ms);

      rec.add(Record.tagsField, chunk.getTags());
      rec.add(Record.sourceField, chunk.getSource());
      rec.add(Record.applicationField, chunk.getApplication());
      output.collect(key, rec);

    } catch (ParseException e) {
      log.warn("Unable to parse the date in DefaultProcessor ["
          + recordEntry + "]", e);
      // ...
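
The double rounding in that last snippet is easy to miss: the same Calendar is truncated to the minute for the record time and then to the hour for the key's time bucket. A standalone sketch of just that arithmetic (plain java.util; the class and method names are made up):

    import java.util.Calendar;

    public class TimeBucketSketch {
      // Returns { minuteMs, hourMs }: the input truncated to the start of
      // its minute (used as the record time) and to the start of its hour
      // (used as the leading component of the ClientTrace sort key).
      static long[] buckets(long ms) {
        Calendar cal = Calendar.getInstance();
        cal.setTimeInMillis(ms);
        cal.set(Calendar.SECOND, 0);
        cal.set(Calendar.MILLISECOND, 0);
        long minuteMs = cal.getTimeInMillis();
        cal.set(Calendar.MINUTE, 0);
        return new long[] { minuteMs, cal.getTimeInMillis() };
      }
    }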
