Package: org.apache.hadoop.chukwa.util

Examples of org.apache.hadoop.chukwa.util.DatabaseWriter


    /**
     * Finds a single ClusterHadoopRpc record whose "timestamp" column matches
     * the given value, or falls through when no row matches.
     * NOTE(review): this listing is truncated in the original page — the
     * catch body and the method close are not shown here.
     */
    public static ClusterHadoopRpc find(String timestamp) {
  String cluster = getCluster();
  DatabaseWriter dbw = new DatabaseWriter(cluster);

  if (timestamp != null) {
      // get simple value
            try {
    // Presumably builds "SELECT ... WHERE timestamp = <timestamp>" — semantics
    // of getSingleQuery are defined elsewhere; confirm against its source.
    String query = getSingleQuery(ClusterHadoopRpcHome.table,"timestamp",timestamp);
        ResultSet rs = dbw.query(query);
        if (rs.next()) {
        // Map the first matching row into a domain object and return it.
        ClusterHadoopRpc obj = createClusterHadoopRpc(rs);
        return obj;
    }
      } catch (Exception e) {
[Snippet truncated in the original listing — see the full source for the remainder.]


    /**
     * Finds a single ClusterHadoopRpc record matching both the timestamp and
     * host criteria.  The timestamp argument is parsed as a long (presumably
     * epoch milliseconds — confirm against convertLongToDateString) and
     * converted to a date string before being used in the query.
     * NOTE(review): this listing is truncated in the original page — the
     * catch body and the method close are not shown here.
     */
    public static ClusterHadoopRpc find(String timestamp, String host) {
  String cluster = getCluster();
  DatabaseWriter dbw = new DatabaseWriter(cluster);

  if (timestamp != null) {
      // get simple value
            try {
    // Build the lookup criteria: converted timestamp plus host.
    Map<String, String> criteriaMap = new HashMap<String,String>();
    criteriaMap.put("timestamp",convertLongToDateString(Long.parseLong(timestamp)));
    criteriaMap.put("host",host);

    String query = getCriteriaQuery(ClusterHadoopRpcHome.table,criteriaMap);
        ResultSet rs = dbw.query(query);
        if (rs.next()) {
        // First matching row wins.
        ClusterHadoopRpc obj = createClusterHadoopRpc(rs);
        return obj;
    }
      } catch (Exception e) {
[Snippet truncated in the original listing — see the full source for the remainder.]

    /**
     * Returns every ClusterHadoopRpc row whose time falls between starttime
     * and endtime (boundary inclusiveness is defined by getTimeBetweenQuery,
     * which is not shown here).  Returns an empty collection when no rows
     * match or when the query fails.
     * NOTE(review): this listing is truncated in the original page — the
     * catch body and the method close are not shown here.
     */
    public static Collection<ClusterHadoopRpc> findBetween(String starttime, String endtime) {
  String cluster = getCluster();
  DatabaseWriter dbw = new DatabaseWriter(cluster);

  Collection<ClusterHadoopRpc> collection = new Vector<ClusterHadoopRpc>();

  try {
      String query = getTimeBetweenQuery(ClusterHadoopRpcHome.table,starttime,endtime);
      ResultSet rs = dbw.query(query);
      while (rs.next()) {
    // One domain object per matching row.
    ClusterHadoopRpc obj = createClusterHadoopRpc(rs);
    collection.add(obj);
      }
  } catch (Exception e) {
[Snippet truncated in the original listing — see the full source for the remainder.]

       boolean error = false;
       try{
           // SQL query to monitor database
           DatabaseConfig dbc = new DatabaseConfig();
           log.info("cluster:"+cluster);
           DatabaseWriter db = new DatabaseWriter(cluster);
           Calendar c = Calendar.getInstance();
           long now = c.getTimeInMillis();
           String[] tableName = dbc.findTableName("system_metrics", now, now);
           String query = "select unix_TIMESTAMP(now()) - unix_timestamp(max(timestamp)) as delay from "+tableName[0]+" ;";
           ResultSet rs = db.query(query);
           while(rs.next()) {
               long delay = rs.getLong(1);
               if(delay>600) {
                   log.error("Chukwa: "+cluster+": No new data for the past 30 minutes for system metrics");                  
                   error=true;
               }
           }
           query = "select count(*) as UpdatesPerHr from "+tableName[0]+" where Timestamp > date_sub(now(), interval 60 minute) ;";
           rs = db.query(query);
           while(rs.next()) {
               updates = rs.getLong(1);
               if(updates==0) {
                   log.error("Chukwa: "+cluster+": No system metrics data received for the past 60 minutes");                  
                   error=true;
               }
           }
           String[] hodTableNames = dbc.findTableName("HodJob", now, now);
           query = "select count(*) as UpdatesPerHr from "+hodTableNames[0]+" where StartTime > date_sub(now(), interval 60 minute) ;";
           rs = db.query(query);          
           while(rs.next()) {
               long updatesHod = rs.getLong(1);
               if(updatesHod==0) {
                   log.error("Chukwa: "+cluster+": No hod job data received for the past 60 minutes");
               }
           }
           String[] mrTableNames = dbc.findTableName("mr_job", now, now);
           query = "select count(*) as UpdatesPerHr from "+mrTableNames+" where FINISH_TIME > date_sub(now(), interval 1440 minute) ;";
           rs = db.query(query);                     
           while(rs.next()) {
               long updatesMR = rs.getLong(1);
               if(updatesMR==0) {
                   log.error("MDL: no map reduce job data received for the past day.");
                   error=true;
               }
           }
           db.close();
       }catch (Exception ex){
           log.error("Unexpected error:"+ex.getStackTrace().toString());
           System.exit(1);
       }
       if(!error) {
[Snippet truncated in the original listing — see the full source for the remainder.]

  // Cluster name used for every DatabaseWriter connection in this test class.
  String cluster = "demo";
  // Snapshot of "now" (epoch millis) used as the base time for table creation.
  long current = Calendar.getInstance().getTimeInMillis();

  public void setUp() {
    System.setProperty("CLUSTER","demo");
    DatabaseWriter db = new DatabaseWriter(cluster);
    String buffer = "";
    File aFile = new File(System.getenv("CHUKWA_CONF_DIR")
                 + File.separator + "database_create_tables.sql");
    buffer = readFile(aFile);
    String tables[] = buffer.split(";");
    for(String table : tables) {
      if(table.length()>5) {
        db.execute(table);
      }
    }
    db.close();
    for(int i=0;i<timeWindow.length;i++) {
      TableCreator tc = new TableCreator();
      long start = current;
      long end = current + (timeWindow[i]*1440*60*1000);
      tc.createTables(start, end);
View Full Code Here

      tc.createTables(start, end);
    }
  }

  public void tearDown() {
    DatabaseWriter db = null;
    try {
      db = new DatabaseWriter(cluster);
      ResultSet rs = db.query("show tables");
      ArrayList<String> list = new ArrayList<String>();
      while(rs.next()) {
        String table = rs.getString(1);
        list.add(table);
      }
      for(String table : list) {
        db.execute("drop table "+table);
      }
    } catch(Throwable ex) {
    } finally {
      if(db!=null) {
        db.close();
      }
    }
  }
[Snippet truncated in the original listing — see the full source for the remainder.]

        // Feed each collected sequence file through the loader ('mdl' —
        // presumably a MetricDataLoader per the fail message below; the loop
        // opening is truncated in this listing).
        mdl.process(sequenceFile.getPath());
      }
    } catch (Throwable ex) {
      fail("SQL Exception: "+ExceptionUtil.getStackTrace(ex));
    }
    // Verify every expected table returned rows with no NULL columns.
    DatabaseWriter db = new DatabaseWriter(cluster);
    for(int i=0;i<tables.length;i++) {
      // Macro expands the [avg(...)]/[table] placeholders into concrete
      // column and partition names for the current timestamp.
      String query = "select [avg("+tables[i]+")] from ["+tables[i]+"]";
      Macro mp = new Macro(current,query);
      query = mp.toString();
      try {
        ResultSet rs = db.query(query);
        ResultSetMetaData rsmd = rs.getMetaData();
        int numberOfColumns = rsmd.getColumnCount();
        while(rs.next()) {
          for(int j=1;j<=numberOfColumns;j++) {
            assertTrue("Table: "+tables[i]+", Column: "+rsmd.getColumnName(j)+", contains no data.",rs.getString(j)!=null);
          }
        }
      } catch(Throwable ex) {
        fail("MetricDataLoader failed: "+ExceptionUtil.getStackTrace(ex));
      }
    }
    db.close();
    // NOTE(review): assertTrue(..., true) can never fail — it only documents
    // that execution reached this point.
    assertTrue("MetricDataLoader executed successfully.",true);
  }
[Snippet truncated in the original listing — see the full source for the remainder.]

TOP

Related Classes of org.apache.hadoop.chukwa.util.DatabaseWriter

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact: coftware#gmail.com.