Examples of ChukwaRecord


Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord
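This snippet, evidently from a test, compares a gold SequenceFile of ChukwaRecords against a freshly generated one, record by record, and returns false on the first key or value mismatch.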

      // +"][" + testFile.getName() +"]");
      goldReader = new SequenceFile.Reader(fs, inputFile, conf);
      testReader = new SequenceFile.Reader(fs, testFile, conf);

      ChukwaRecordKey goldKey = new ChukwaRecordKey();
      ChukwaRecord goldRecord = new ChukwaRecord();

      ChukwaRecordKey testKey = new ChukwaRecordKey();
      ChukwaRecord testRecord = new ChukwaRecord();

      // log.info(">>>>>>>>>>>>>> Start reading");
      while (goldReader.next(goldKey, goldRecord)) {
        if (!testReader.next(testKey, testRecord)) return false; // test file ended before gold file

        if (goldKey.compareTo(testKey) != 0) {
          log.info(">>>>>>>>>>>>>> Not the same Key");
          log.info(">>>>>>>>>>>>>> Record [" + goldKey.getKey() + "] ["
              + goldKey.getReduceType() + "]");
          log.info(">>>>>>>>>>>>>> Record [" + testKey.getKey() + "] ["
              + testKey.getReduceType() + "]");
          return false;
        }

        if (goldRecord.compareTo(testRecord) != 0) {
          log.info(">>>>>>>>>>>>>> Not the same Value");
          log.info(">>>>>>>>>>>>>> Record [" + goldKey.getKey() + "] ["
              + goldKey.getReduceType() + "]");
          log.info(">>>>>>>>>>>>>> Record [" + testKey.getKey() + "] ["
              + testKey.getReduceType() + "]");
          log.info(">>>>>>>>>>>>>> Gold Value [" + goldRecord.toString() + "]");
          log.info(">>>>>>>>>>>>>> Test value [" + testRecord.toString() + "]");

          return false;
        }
      }
      // log.info(">>>>>>>>>>>>>> Same File");

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord
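This snippet is a command-line dump: it opens a ChukwaRecord SequenceFile and prints each key's data type and key string, followed by the record's timestamp and every field/value pair.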

    FileSystem fs = FileSystem.get(new URI(fsName), conf);

    SequenceFile.Reader r = new SequenceFile.Reader(fs, new Path(args[0]), conf);

    ChukwaRecordKey key = new ChukwaRecordKey();
    ChukwaRecord record = new ChukwaRecord();
    try {
      while (r.next(key, record)) {
        System.out.println("\t ===== KEY   ===== ");

        System.out.println("DataType: " + key.getReduceType());
        System.out.println("\nKey: " + key.getKey());
        System.out.println("\t ===== Value =====");

        String[] fields = record.getFields();
        System.out.println("Timestamp : " + record.getTime());
        for (String field : fields) {
          System.out.println("[" + field + "] :" + record.getValue(field));
        }
      }
    } catch (Exception e) {
      e.printStackTrace();
    }
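Because the snippet above is shown without its surrounding class and imports, here is a minimal, self-contained sketch of the same dump loop. The class name DumpRecords and the argument layout (args[0] = sequence file path, args[1] = filesystem URI) are illustrative assumptions, not part of the original code.

    import java.net.URI;

    import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord;
    import org.apache.hadoop.chukwa.extraction.engine.ChukwaRecordKey;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.SequenceFile;

    public class DumpRecords {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(new URI(args[1]), conf); // e.g. an HDFS or file:/// URI
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, new Path(args[0]), conf);

        // next(key, value) fills the supplied Writables and returns false at end of file
        ChukwaRecordKey key = new ChukwaRecordKey();
        ChukwaRecord record = new ChukwaRecord();
        try {
          while (reader.next(key, record)) {
            System.out.println("DataType : " + key.getReduceType());
            System.out.println("Key      : " + key.getKey());
            System.out.println("Timestamp: " + record.getTime());
            for (String field : record.getFields()) {
              System.out.println("[" + field + "] : " + record.getValue(field));
            }
          }
        } finally {
          reader.close();
        }
      }
    }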

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord
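This snippet builds ChukwaRecords from a JDBC ResultSet: each row's columns are concatenated into a body string, the configured time field sets the record timestamp, an optional substring filter drops non-matching rows, and records are grouped into a map keyed by timestamp.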

      if (stmt.execute(query)) {
        rs = stmt.getResultSet();
        ResultSetMetaData rmeta = rs.getMetaData();
        int col = rmeta.getColumnCount();
        while (rs.next()) {
          ChukwaRecord event = new ChukwaRecord();
          String cell = "";
          long timestamp = 0;

          // JDBC columns are 1-based and run through col inclusive
          for (int i = 1; i <= col; i++) {
            String value = rs.getString(i);
            if (value != null) {
              cell = cell + " " + rmeta.getColumnName(i) + ":" + value;
            }
            if (rmeta.getColumnName(i).equals(timeField)) {
              timestamp = rs.getLong(i);
              event.setTime(timestamp);
            }
          }
          boolean isValid = false;
          if (filter == null || filter.equals("")) {
            isValid = true;
          } else if (cell.indexOf(filter) >= 0) { // match anywhere in the row text
            isValid = true;
          }
          if (!isValid) {
            continue;
          }

          event.add(Record.bodyField, cell);
          event.add(Record.sourceField, cluster + "." + dataSource);
          if (records.containsKey(timestamp)) {
            records.get(timestamp).add(event);
          } else {
            List<Record> list = new LinkedList<Record>();
            list.add(event);
            records.put(event.getTime(), list);
          }
        }
      }
    } catch (SQLException e) {
      e.printStackTrace();

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord
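This test snippet creates a SequenceFile with CreateRecordFile.makeTestSequenceFile, then reads it back and asserts that each key (time partition/host/timestamp) and each record field matches the corresponding line of the original input file.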

    //create the sequence file
    CreateRecordFile.makeTestSequenceFile(inputFile, outputFile, clusterName,
                                          dataType, streamName, processor);
    //read the output file
    ChukwaRecordKey key = new ChukwaRecordKey();
    ChukwaRecord record = new ChukwaRecord();

    Configuration conf = new Configuration();
    FileSystem fs = outputFile.getFileSystem(conf);
    SequenceFile.Reader sequenceReader = new SequenceFile.Reader(fs, outputFile, conf);

    //read the input file to assert
    BufferedReader inputReader = new BufferedReader(new FileReader(inputFile));

    String expectedHostname = InetAddress.getLocalHost().getHostName();

    //Read input and output back comparing each
    int i = 0;
    while (sequenceReader.next(key, record)) {
      String line = inputReader.readLine();
      assertNotNull("Sequence file contains more records than input file", line);

      long expectedTime = sdf.parse(line.substring(0,23)).getTime();
      calendar.setTimeInMillis(expectedTime);
      calendar.set(Calendar.MINUTE, 0);
      calendar.set(Calendar.SECOND, 0);
      calendar.set(Calendar.MILLISECOND, 0);

      String expectedKey = calendar.getTimeInMillis() + "/" +
                           expectedHostname + "/" + expectedTime;
      String expectedTags = "cluster=\"" + clusterName + "\"";

      //assert key
      assertEquals("Invalid key found for record " + i,   expectedKey, key.getKey());
      assertEquals("Invalid dataType found for record " + i, dataType, key.getReduceType());

      //assert record
      assertEquals("Invalid record time for record " + i, expectedTime, record.getTime());
      assertEquals("Invalid body for record " + i, line, record.getValue("body"));
      assertEquals("Invalid capp for record " + i, streamName, record.getValue("capp"));
      assertEquals("Invalid csource for record " + i, expectedHostname, record.getValue("csource"));
      assertEquals("Invalid ctags for record " + i, expectedTags , record.getValue("ctags").trim());

      i++;
    }

    sequenceReader.close();

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord
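This snippet loads ChukwaRecords into database tables. It derives the target table from the key's reduce type, handling time-partitioned table names of the form type-N (week, month, quarter, year, decade), then builds per-node maps of normalized metric keys and values.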

    long currentTimeMillis = System.currentTimeMillis();
    boolean isSuccessful = true;
    String recordType = null;

    ChukwaRecordKey key = new ChukwaRecordKey();
    ChukwaRecord record = new ChukwaRecord();
    String cluster = null;
    int numOfRecords = 0;
    try {
      Pattern p = Pattern.compile("(.*)\\-(\\d+)$");
      int batch = 0;
      while (reader.next(key, record)) {
        numOfRecords++;
        if(first) {
          try {
            cluster = RecordUtil.getClusterName(record);
            initEnv(cluster);
            first=false;
          } catch(Exception ex) {
            log.error("Initialization failed for: "+cluster+".  Please check jdbc configuration.");
            return false;
          }
        }
        String sqlTime = DatabaseWriter.formatTimeStamp(record.getTime());
        log.debug("Timestamp: " + record.getTime());
        log.debug("DataType: " + key.getReduceType());

        String[] fields = record.getFields();
        String table = null;
        String[] priKeys = null;
        HashMap<String, HashMap<String, String>> hashReport = new HashMap<String, HashMap<String, String>>();
        StringBuilder normKey = new StringBuilder();
        String node = record.getValue("csource");
        recordType = key.getReduceType().toLowerCase();
        String dbKey = "report.db.name." + recordType;
        Matcher m = p.matcher(recordType);
        if (dbTables.containsKey(dbKey)) {
          String[] tmp = mdlConfig.findTableName(mdlConfig.get(dbKey), record
              .getTime(), record.getTime());
          table = tmp[0];
        } else if(m.matches()) {
          String timePartition = "_week";
          int timeSize = Integer.parseInt(m.group(2));
          if(timeSize == 5) {
            timePartition = "_month";
          } else if(timeSize == 30) {
            timePartition = "_quarter";
          } else if(timeSize == 180) {
            timePartition = "_year";
          } else if(timeSize == 720) {
            timePartition = "_decade";
          }
          int partition = (int) (record.getTime() / timeSize);
          StringBuilder tmpDbKey = new StringBuilder();
          tmpDbKey.append("report.db.name.");
          tmpDbKey.append(m.group(1));
          if(dbTables.containsKey(tmpDbKey.toString())) {
            StringBuilder tmpTable = new StringBuilder();
            tmpTable.append(dbTables.get(tmpDbKey.toString()));
            tmpTable.append("_");
            tmpTable.append(partition);
            tmpTable.append("_");
            tmpTable.append(timePartition);
            table = tmpTable.toString();
          } else {
            log.debug(tmpDbKey.toString() + " does not exist.");
            continue;           
          }
        } else {
          log.debug(dbKey + " does not exist.");
          continue;
        }
        log.debug("table name:" + table);
        try {
          priKeys = mdlConfig.get("report.db.primary.key." + recordType).split(
              ",");
        } catch (Exception nullException) {
          log.debug(ExceptionUtil.getStackTrace(nullException));
        }
        for (String field : fields) {
          String keyName = escape(field.toLowerCase(), newSpace);
          String keyValue = escape(record.getValue(field).toLowerCase(),
              newSpace);
          StringBuilder buildKey = new StringBuilder();
          buildKey.append("normalize.");
          buildKey.append(recordType);
          buildKey.append(".");
          buildKey.append(keyName);
          if (normalize.containsKey(buildKey.toString())) {
            if (normKey.toString().equals("")) {
              normKey.append(keyName);
              normKey.append(".");
              normKey.append(keyValue);
            } else {
              normKey.append(".");
              normKey.append(keyName);
              normKey.append(".");
              normKey.append(keyValue);
            }
          }
          StringBuilder normalizedKey = new StringBuilder();
          normalizedKey.append("metric.");
          normalizedKey.append(recordType);
          normalizedKey.append(".");
          normalizedKey.append(normKey);
          if (hashReport.containsKey(node)) {
            HashMap<String, String> tmpHash = hashReport.get(node);
            tmpHash.put(normalizedKey.toString(), keyValue);
            hashReport.put(node, tmpHash);
          } else {
            HashMap<String, String> tmpHash = new HashMap<String, String>();
            tmpHash.put(normalizedKey.toString(), keyValue);
            hashReport.put(node, tmpHash);
          }
        }
        for (String field : fields) {
          String valueName = escape(field.toLowerCase(), newSpace);
          String valueValue = escape(record.getValue(field).toLowerCase(),
              newSpace);
          StringBuilder buildKey = new StringBuilder();
          buildKey.append("metric.");
          buildKey.append(recordType);
          buildKey.append(".");

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord
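This snippet converts a Pig tuple into a ChukwaRecord: fields at well-known positions become the primary key, source, record type, application, cluster name, and timestamp (rounded down to the hour for the time partition); map-typed fields are flattened into record fields, and the finished key/record pair is written out.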

    String pk = "";

    try {

      ChukwaRecordKey key = new ChukwaRecordKey();
      ChukwaRecord record = new ChukwaRecord();

      record.setTime(System.currentTimeMillis());
      int inputSize = f.size();
      for(int i=0;i<inputSize;i++) {
        Object field = f.get(i);

        if (field == null) {
          continue;
        }

        if (i == this.pkFieldIndex) {
          pk = field.toString();
          continue;
        } else if (i == this.sourceFieldIndex) {
          source = field.toString();
          continue;
        } else if (i == this.recordTypeFieldIndex) {
          recordType = field.toString();
          continue;
        } else if (i == this.applicationFieldIndex) {
          application = field.toString();
          continue;
        } else if (i == this.clusterNameFieldIndex) {
          clusterName = field.toString();
          continue;
        } else if (i == this.timestampFieldIndex) {

          timestamp = Long.parseLong(field.toString());
          record.setTime(timestamp);

          synchronized (calendar)
          {
            calendar.setTimeInMillis(timestamp);
            calendar.set(Calendar.MINUTE, 0);
            calendar.set(Calendar.SECOND, 0);
            calendar.set(Calendar.MILLISECOND, 0);
            timePartition = calendar.getTimeInMillis();
          }
          record.setTime(Long.parseLong(field.toString()));
          continue;

        } else if (field instanceof Map) {
          Map<Object, Object> m = (Map<Object, Object>)field;
          for(Object o: m.keySet()) {
            record.add(o.toString(),m.get(o).toString());
          }
          continue;
        } else {
          if (i <fields.length ) {
            record.add(fields[i],field.toString());
          } else {
            record.add("field-"+i,field.toString());
          }

          continue;
        }
      }

      record.add(Record.tagsField, " cluster=\"" + clusterName.trim() + "\" ");
      record.add(Record.sourceField, source);
      record.add(Record.applicationField, application);
      key.setKey("" + timePartition + "/" + pk + "/" + timestamp);
      key.setReduceType(recordType);

      writer.write(key, record);
    } catch (ExecException e) {

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord
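This snippet is the core of a Pig loader (ChukwaLoader): getNext() reads the next ChukwaRecord and returns a two-field tuple holding the record's timestamp and a map of its fields as DataByteArray values.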

  public ChukwaLoader() {
  }

  @Override
  public Tuple getNext() throws IOException {
    ChukwaRecord record = null;

    try {
      if (!reader.nextKeyValue()) {
        return null;
      }
    } catch (InterruptedException e) {
        throw new IOException(e);
    }

    record = reader.getCurrentValue();

    Tuple ret = tf.newTuple(2);
    try
    {
      ret.set(0, new Long(record.getTime()));

      HashMap<Object, Object> pigMapFields = new HashMap<Object, Object>();
      TreeMap<String, Buffer> mapFields = record.getMapFields();

      if (mapFields != null)
      {
        for (String key : mapFields.keySet())
        {
          pigMapFields.put(key, new DataByteArray(record.getValue(key).getBytes()));
        }
      }
      ret.set(1, pigMapFields);

    } catch (ExecException e)

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord
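This snippet dispatches on the incoming key/value types; when given a ChukwaRecordKey/ChukwaRecord pair, it parses an X-Trace Report from the record body and extracts the report's task ID and body text.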

        }
       
        //FIXME: can probably optimize the above lines by doing a search in the raw bytes
        t = new Text(value.getData());
      } else if(k instanceof ChukwaRecordKey && v instanceof ChukwaRecord){
        ChukwaRecord value = (ChukwaRecord) v;
        Report xtrReport = Report.createFromString(value.getValue(Record.bodyField));
        bw = new BytesWritable(xtrReport.getMetadata().getTaskId().get());
        //FIXME: can probably optimize the above lines by doing a search in the raw bytes
        t = new Text(value.getValue(Record.bodyField));
      } else {
        log.error("unexpected key/value types: "+ k.getClass().getCanonicalName()
            + " and " + v.getClass().getCanonicalName() );
        return;
      }

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord
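This test snippet writes a series of synthetic ChukwaRecords for machines M0 through M4 across several one-minute buckets, including a deliberate duplicate and two late arrivals, to exercise downstream aggregation.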

      machine = "M0";
      long time = 1242205800; // Wed, 13 May 2009 09:10:00 GMT
      chukwaKey = TimePartion +"/" + machine +"/" + time;
      key.setKey(chukwaKey);
     
      ChukwaRecord record = new ChukwaRecord();
      record.setTime(time);
      record.add("csource", machine);
      record.add("A", "7");
      record.add("B", "3");
      record.add("C", "9");
 
      seqFileWriter.append(key, record);
    }

    {   
      machine = "M0";
      long time = 1242205800; // Wed, 13 May 2009 09:10:00 GMT
      chukwaKey = TimePartion +"/" + machine +"/" + time;
      key.setKey(chukwaKey);
     
      ChukwaRecord record = new ChukwaRecord();
      record.setTime(time);
      record.add("csource", machine);
      record.add("D", "1");
 
      seqFileWriter.append(key, record);
    }

    {   
      machine = "M1";
      long time = 1242205800; // Wed, 13 May 2009 09:10:00 GMT
      chukwaKey = TimePartion +"/" + machine +"/" + time;
      key.setKey(chukwaKey);
     
      ChukwaRecord record = new ChukwaRecord();
      record.setTime(time);
      record.add("csource", machine);
      record.add("A", "17");
 
      seqFileWriter.append(key, record);
    }
   
    {   
      machine = "M1";
      long time = 1242205800; // Wed, 13 May 2009 09:10:00 GMT
      chukwaKey = TimePartion +"/" + machine +"/" + time;
      key.setKey(chukwaKey);
     
      ChukwaRecord record = new ChukwaRecord();
      record.setTime(time);
      record.add("csource", machine);
      record.add("B", "37");
      record.add("C", "51");
      seqFileWriter.append(key, record);
    }
   
    {   
      machine = "M0";
      long time = 1242205860; // Wed, 13 May 2009 09:11:00 GMT
      chukwaKey = TimePartion +"/" + machine +"/" + time;
      key.setKey(chukwaKey);
     
      ChukwaRecord record = new ChukwaRecord();
      record.setTime(time);
      record.add("csource", machine);
      record.add("A", "8");
      record.add("C", "3");
      record.add("D", "12");
      seqFileWriter.append(key, record);
    }
   
    {   
      machine = "M0";
      long time = 1242205860; // Wed, 13 May 2009 09:11:00 GMT
      chukwaKey = TimePartion +"/" + machine +"/" + time;
      key.setKey(chukwaKey);
     
      ChukwaRecord record = new ChukwaRecord();
      record.setTime(time);
      record.add("csource", machine);
      record.add("A", "8");
      record.add("B", "6");
      seqFileWriter.append(key, record);
    }
   
    {   
      machine = "M1";
      long time = 1242205860; // Wed, 13 May 2009 09:11:00 GMT
      chukwaKey = TimePartion +"/" + machine +"/" + time;
      key.setKey(chukwaKey);
     
      ChukwaRecord record = new ChukwaRecord();
      record.setTime(time);
      record.add("csource", machine);
      record.add("A", "13.2");
      record.add("B", "23");
      record.add("C", "8.5");
      record.add("D", "6");
     
      // create duplicate
      seqFileWriter.append(key, record);
      seqFileWriter.append(key, record);
    }
   
    {   
      machine = "M0";
      long time = 1242205920; // Wed, 13 May 2009 09:12:00 GMT
      chukwaKey = TimePartion +"/" + machine +"/" + time;
      key.setKey(chukwaKey);
     
      ChukwaRecord record = new ChukwaRecord();
      record.setTime(time);
      record.add("csource", machine);
      record.add("A", "8");
      record.add("B", "6");
      record.add("C", "8");
      record.add("D", "6");
      record.add("E", "48.5");
      seqFileWriter.append(key, record);
    }
   
    {   
      machine = "M1";
      long time = 1242205920; // Wed, 13 May 2009 09:12:00 GMT
      chukwaKey = TimePartion +"/" + machine +"/" + time;
      key.setKey(chukwaKey);
     
      ChukwaRecord record = new ChukwaRecord();
      record.setTime(time);
      record.add("csource", machine);
      record.add("A", "8.3");
      record.add("B", "5.2");
      record.add("C", "37.7");
      record.add("D", "61.9");
      record.add("E", "40.3");
      seqFileWriter.append(key, record);
    }
   
    {   
      machine = "M1";
      long time = 1242205980; // Wed, 13 May 2009 09:13:00 GMT
      chukwaKey = TimePartion +"/" + machine +"/" + time;
      key.setKey(chukwaKey);
     
      ChukwaRecord record = new ChukwaRecord();
      record.setTime(time);
      record.add("csource", machine);
      record.add("A", "18.3");
      record.add("B", "1.2");
      record.add("C", "7.7");
      seqFileWriter.append(key, record);
    }
   
    {   
      machine = "M2";
      long time = 1242205980; // Wed, 13 May 2009 09:13:00 GMT
      chukwaKey = TimePartion +"/" + machine +"/" + time;
      key.setKey(chukwaKey);
     
      ChukwaRecord record = new ChukwaRecord();
      record.setTime(time);
      record.add("csource", machine);
      record.add("A", "8.9");
      record.add("B", "8.3");
      record.add("C", "7.2");
      record.add("D", "6.1");
      seqFileWriter.append(key, record);
    }
   
    {   
      machine = "M3";
      // late arrival T0
      long time = 1242205920; // Wed, 13 May 2009 09:12:00 GMT
      chukwaKey = TimePartion +"/" + machine +"/" + time;
      key.setKey(chukwaKey);
     
      ChukwaRecord record = new ChukwaRecord();
      record.setTime(time);
      record.add("csource", machine);
      record.add("A", "12.5");
      record.add("B", "26.82");
      record.add("C", "89.51");
      seqFileWriter.append(key, record);
    }
   
    {   
      machine = "M4";
      // late arrival T0
      long time = 1242205920; // Wed, 13 May 2009 09:12:00 GMT
      chukwaKey = TimePartion +"/" + machine +"/" + time;
      key.setKey(chukwaKey);
     
      ChukwaRecord record = new ChukwaRecord();
      record.setTime(time);
      record.add("csource", machine);
      record.add("A", "13.91");
      record.add("B", "21.02");
      record.add("C", "18.05");
      seqFileWriter.append(key, record);
    }
   
    seqFileWriter.close();
  }
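The preceding snippet never shows how seqFileWriter is constructed. A minimal sketch, assuming the local file system, an uncompressed file, and a hypothetical output path:

      // Requires the same imports as the dump example earlier on this page.
      Configuration conf = new Configuration();
      FileSystem fs = FileSystem.getLocal(conf);              // assumption: local file system
      Path outputPath = new Path("/tmp/chukwaTestFile.evt");  // hypothetical path

      // The key/value classes must match what append(key, record) receives above.
      SequenceFile.Writer seqFileWriter = SequenceFile.createWriter(
          fs, conf, outputPath,
          ChukwaRecordKey.class, ChukwaRecord.class,
          SequenceFile.CompressionType.NONE);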

Examples of org.apache.hadoop.chukwa.extraction.engine.ChukwaRecord
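This snippet reads an entire ChukwaRecord SequenceFile into a single string (keys, timestamps, and alphabetically sorted fields), apparently for comparing actual output against an expected dump in tests.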

    log.info("File: [" +  file + "]" + fs.exists(new Path(file)));
    try {
      reader = new SequenceFile.Reader(fs, new Path(file), conf);

      ChukwaRecordKey key = new ChukwaRecordKey();
      ChukwaRecord record = new ChukwaRecord();

      StringBuilder sb = new StringBuilder();
      while (reader.next(key, record)) {
        // append a newline after each entry so the dump stays readable
        sb.append("===== KEY   =====\n");
        sb.append("DataType: " + key.getReduceType() + "\n");
        sb.append("Key: " + key.getKey() + "\n");
        sb.append("===== Value =====\n");

        String[] fields = record.getFields();
        Arrays.sort(fields);
        sb.append("Timestamp : " + record.getTime() + "\n");
        for (String field : fields) {
          sb.append("[" + field + "] :" + record.getValue(field) + "\n");
        }
      }
     
      return sb.toString();
    } catch (Throwable e) {