Package org.apache.hadoop.chukwa.util

Examples of org.apache.hadoop.chukwa.util.DatabaseWriter
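Every snippet on this page follows the same lifecycle: construct a DatabaseWriter for the current cluster, issue a SQL query, walk the ResultSet, and release the writer when done. A minimal sketch of that pattern, assuming a cluster name obtained the way the snippets below obtain it (the table name here is only a placeholder):

    DatabaseWriter db = null;
    try {
      db = new DatabaseWriter(cluster);
      ResultSet rs = db.query("select * from some_table"); // placeholder table
      while (rs.next()) {
        System.out.println(rs.getString(1));
      }
    } catch (Exception e) {
      e.printStackTrace();
    } finally {
      if (db != null) {
        db.close(); // releases the underlying JDBC connection
      }
    }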


      e.printStackTrace();
      fail("Metric Data Loader Error.");
    }

    // Verify the loaded data: every numeric column must hold a sane value.
    DatabaseWriter db = null;
    try {
      db = new DatabaseWriter(cluster);
      // Macro rewrites [system_metrics] into the concrete time-partitioned table.
      Macro mp = new Macro(current, current, "select * from [system_metrics]");
      String query = mp.toString();
      ResultSet rs = db.query(query);
      ResultSetMetaData rmeta = rs.getMetaData();
      int size = rmeta.getColumnCount();
      while (rs.next()) {
        for (int i = 1; i <= size; i++) {
          int columnType = rmeta.getColumnType(i);
          if (columnType == java.sql.Types.BIGINT ||
              columnType == java.sql.Types.INTEGER) {
            long testValue = rs.getLong(i);
            assertTrue(testValue < 1000000000L);
          } else if (columnType == java.sql.Types.FLOAT ||
                     columnType == java.sql.Types.DOUBLE) {
            double testValue = rs.getDouble(i);
            assertTrue(testValue < 1000000000L);
          }
        }
      }
    } catch (Throwable ex) {
      fail("Data verification failed.");
    } finally {
      if (db != null) {
        db.close();
      }
    }
  }
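Note that the test never names a concrete table: Macro takes a start time, an end time, and a template query, and rewrites the bracketed [system_metrics] into the concrete time-partitioned table covering that range. A sketch of that step in isolation (the exact partition naming depends on the database configuration):

    long now = System.currentTimeMillis();
    // Same start and end time, so the macro targets a single partition.
    Macro mp = new Macro(now, now, "select * from [system_metrics]");
    String query = mp.toString(); // bracketed name replaced by the real table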


        return null;
    }

    public static Collection<ClientTrace> findBetween(String starttime, String endtime) {
        String cluster = getCluster();
        DatabaseWriter dbw = new DatabaseWriter(cluster);

        Collection<ClientTrace> collection = new Vector<ClientTrace>();

        try {
            String query = getTimeBetweenQuery(ClientTraceHome.table, starttime, endtime);
            ResultSet rs = dbw.query(query);
            while (rs.next()) {
                ClientTrace obj = createClientTrace(rs);
                collection.add(obj);
            }
        } catch (Exception e) {
            // truncated in the original listing; minimal completion: log and fall through
            e.printStackTrace();
        }
        return collection;
    }
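A hypothetical caller for the range query above; the string format of the time bounds is an assumption and must match whatever getTimeBetweenQuery() expects:

    long end = System.currentTimeMillis();
    long start = end - 60L * 60L * 1000L; // one-hour window
    Collection<ClientTrace> traces =
        ClientTraceHome.findBetween(String.valueOf(start), String.valueOf(end));
    for (ClientTrace t : traces) {
      // each element was materialized from one row by createClientTrace(rs)
    }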

    /*
     * find by timestamp
     */
    public static DfsDataNode find(String timestamp) {
        String cluster = getCluster();
        DatabaseWriter dbw = new DatabaseWriter(cluster);

        if (timestamp != null) {
            // get simple value
            try {
                String query = getSingleQuery(DfsDataNodeHome.table, "timestamp", timestamp);
                ResultSet rs = dbw.query(query);
                if (rs.next()) {
                    DfsDataNode obj = createDfsDataNode(rs);
                    return obj;
                }
            } catch (Exception e) {
                // truncated in the original listing; minimal completion: log and fall through
                e.printStackTrace();
            }
        }
        return null;
    }

    /*
     * find by key
     */
    public static DfsDataNode find(String timestamp, String host) {
        String cluster = getCluster();
        DatabaseWriter dbw = new DatabaseWriter(cluster);

        if (timestamp != null) {
            // get simple value
            try {
                Map<String, String> criteriaMap = new HashMap<String, String>();
                criteriaMap.put("timestamp", convertLongToDateString(Long.parseLong(timestamp)));
                criteriaMap.put("host", host);

                String query = getCriteriaQuery(DfsDataNodeHome.table, criteriaMap);
                ResultSet rs = dbw.query(query);
                if (rs.next()) {
                    DfsDataNode obj = createDfsDataNode(rs);
                    return obj;
                }
            } catch (Exception e) {
                // truncated in the original listing; minimal completion: log and fall through
                e.printStackTrace();
            }
        }
        return null;
    }
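Unlike the single-column lookup, the two-argument find builds its WHERE clause from a criteria map, first converting the millisecond timestamp into the database's date-string format. A hypothetical lookup, assuming these statics live on DfsDataNodeHome as the table reference suggests:

    // Timestamp is milliseconds since the epoch, as implied by
    // Long.parseLong(...) feeding convertLongToDateString(...) above.
    long ts = System.currentTimeMillis();
    DfsDataNode node = DfsDataNodeHome.find(String.valueOf(ts), "datanode01.example.com");
    if (node != null) {
      // exactly one row matched the (timestamp, host) key
    }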

    /*
     * find within the start time and end time
     */
    public static Collection<DfsDataNode> findBetween(String starttime, String endtime) {
        String cluster = getCluster();
        DatabaseWriter dbw = new DatabaseWriter(cluster);

        Collection<DfsDataNode> collection = new Vector<DfsDataNode>();

        try {
            String query = getTimeBetweenQuery(DfsDataNodeHome.table, starttime, endtime);
            ResultSet rs = dbw.query(query);
            while (rs.next()) {
                DfsDataNode obj = createDfsDataNode(rs);
                collection.add(obj);
            }
        } catch (Exception e) {
            // truncated in the original listing; minimal completion: log and fall through
            e.printStackTrace();
        }
        return collection;
    }

    /*
     * find by timestamp
     */
    public static HadoopJvm find(String timestamp) {
        String cluster = getCluster();
        DatabaseWriter dbw = new DatabaseWriter(cluster);

        if (timestamp != null) {
            // get simple value
            try {
                String query = getSingleQuery(HadoopJvmHome.table, "timestamp", timestamp);
                ResultSet rs = dbw.query(query);
                if (rs.next()) {
                    HadoopJvm obj = createHadoopJvm(rs);
                    return obj;
                }
            } catch (Exception e) {
                // truncated in the original listing; minimal completion: log and fall through
                e.printStackTrace();
            }
        }
        return null;
    }

    /*
     * find by key
     */
    public static HadoopJvm find(String timestamp, String host, String process_name) {
        String cluster = getCluster();
        DatabaseWriter dbw = new DatabaseWriter(cluster);

        if (timestamp != null) {
            // get simple value
            try {
                Map<String, String> criteriaMap = new HashMap<String, String>();
                criteriaMap.put("timestamp", convertLongToDateString(Long.parseLong(timestamp)));
                criteriaMap.put("host", host);
                criteriaMap.put("process_name", process_name);

                String query = getCriteriaQuery(HadoopJvmHome.table, criteriaMap);
                ResultSet rs = dbw.query(query);
                if (rs.next()) {
                    HadoopJvm obj = createHadoopJvm(rs);
                    return obj;
                }
            } catch (Exception e) {
                // truncated in the original listing; minimal completion: log and fall through
                e.printStackTrace();
            }
        }
        return null;
    }

    /*
     * find within the start time and end time
     */
    public static Collection<HadoopJvm> findBetween(String starttime, String endtime) {
        String cluster = getCluster();
        DatabaseWriter dbw = new DatabaseWriter(cluster);

        Collection<HadoopJvm> collection = new Vector<HadoopJvm>();

        try {
            String query = getTimeBetweenQuery(HadoopJvmHome.table, starttime, endtime);
            ResultSet rs = dbw.query(query);
            while (rs.next()) {
                HadoopJvm obj = createHadoopJvm(rs);
                collection.add(obj);
            }
        } catch (Exception e) {
            // truncated in the original listing; minimal completion: log and fall through
            e.printStackTrace();
        }
        return collection;
    }

    /*
     * find by timestamp
     */
    public static DfsNameNode find(String timestamp) {
        String cluster = getCluster();
        DatabaseWriter dbw = new DatabaseWriter(cluster);

        if (timestamp != null) {
            // get simple value
            try {
                String query = getSingleQuery(DfsNameNodeHome.table, "timestamp", timestamp);
                ResultSet rs = dbw.query(query);
                if (rs.next()) {
                    DfsNameNode obj = createDfsNameNode(rs);
                    return obj;
                }
            } catch (Exception e) {
                // truncated in the original listing; minimal completion: log and fall through
                e.printStackTrace();
            }
        }
        return null;
    }

    /*
     * find by key
     */
    public static DfsNameNode find(String timestamp, String host) {
        String cluster = getCluster();
        DatabaseWriter dbw = new DatabaseWriter(cluster);

        if (timestamp != null) {
            // get simple value
            try {
                Map<String, String> criteriaMap = new HashMap<String, String>();
                criteriaMap.put("timestamp", convertLongToDateString(Long.parseLong(timestamp)));
                criteriaMap.put("host", host);

                String query = getCriteriaQuery(DfsNameNodeHome.table, criteriaMap);
                ResultSet rs = dbw.query(query);
                if (rs.next()) {
                    DfsNameNode obj = createDfsNameNode(rs);
                    return obj;
                }
            } catch (Exception e) {
                // truncated in the original listing; minimal completion: log and fall through
                e.printStackTrace();
            }
        }
        return null;
    }
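One difference worth calling out: the JUnit snippet at the top closes its DatabaseWriter in a finally block, while these Home-class fragments open a new DatabaseWriter on every call and the truncated listings never show a matching close. If the full sources do not close it elsewhere, the safer shape is the test's, sketched here for a hypothetical DfsNameNode range query:

    public static Collection<DfsNameNode> findBetween(String starttime, String endtime) {
        Collection<DfsNameNode> collection = new Vector<DfsNameNode>();
        DatabaseWriter dbw = new DatabaseWriter(getCluster());
        try {
            ResultSet rs = dbw.query(getTimeBetweenQuery(DfsNameNodeHome.table, starttime, endtime));
            while (rs.next()) {
                collection.add(createDfsNameNode(rs));
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            dbw.close(); // release the connection even on failure
        }
        return collection;
    }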
