Package org.apache.hadoop.chukwa.hicc.bean

Examples of org.apache.hadoop.chukwa.hicc.bean.Series
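
The snippets below are taken from Apache Chukwa's HICC web interface: a storage helper (ChukwaHBaseStore) that scans HBase and assembles a Series of timestamp/value points, and JAX-RS resources that expose those series as JSON. From these call sites, the Series bean provides at least a name-taking constructor, add(long timestamp, double value), toJSONObject(), and a JSON-producing toString(). Below is a minimal sketch of such a bean, backed by json-simple as the snippets appear to be; the field names and JSON layout are assumptions, not Chukwa's actual implementation.

import org.json.simple.JSONArray;
import org.json.simple.JSONObject;

// Minimal Series-like bean, inferred from the call sites below; the real
// org.apache.hadoop.chukwa.hicc.bean.Series may differ in layout.
public class Series {
  private final JSONObject series = new JSONObject();
  private final JSONArray data = new JSONArray();

  @SuppressWarnings("unchecked")
  public Series(String name) {
    series.put("name", name);
    series.put("data", data);
  }

  @SuppressWarnings("unchecked")
  public void add(long timestamp, double value) {
    JSONArray point = new JSONArray();
    point.add(timestamp);
    point.add(value);
    data.add(point);
  }

  public JSONObject toJSONObject() {
    return series;
  }

  @Override
  public String toString() {
    return series.toJSONString();
  }
}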


  public static Series getSeries(String tableName, String rkey, String family,
      String column, long startTime, long endTime, boolean filterByRowKey) {
    StringBuilder seriesName = new StringBuilder();
    seriesName.append(rkey);
    seriesName.append(":");
    seriesName.append(family);
    seriesName.append(":");
    seriesName.append(column);

    Series series = new Series(seriesName.toString());
    try {
      HTableInterface table = pool.getTable(tableName);
      // Round the scan start down to the hour; row keys begin with a
      // millisecond timestamp, so this positions the scan at the start of
      // the hour containing startTime.
      Calendar c = Calendar.getInstance();
      c.setTimeInMillis(startTime);
      c.set(Calendar.MINUTE, 0);
      c.set(Calendar.SECOND, 0);
      c.set(Calendar.MILLISECOND, 0);
      String startRow = c.getTimeInMillis()+rkey;
      Scan scan = new Scan();
      scan.addColumn(family.getBytes(), column.getBytes());
      scan.setStartRow(startRow.getBytes());
      scan.setTimeRange(startTime, endTime);
      scan.setMaxVersions();
      if(filterByRowKey) {
        // Row keys have the form "<timestampMillis>-<rkey>"; keep only rows
        // whose suffix matches the requested key exactly.
        RowFilter rf = new RowFilter(CompareOp.EQUAL, new
            RegexStringComparator("[0-9]+-"+rkey+"$"));
        scan.setFilter(rf);
      }
      ResultScanner results = table.getScanner(scan);
      Iterator<Result> it = results.iterator();
      // TODO: Apply a discrete wavelet transformation to limit the output
      // size to 1000 data points for graphing optimization (e.g. JWave).
      while(it.hasNext()) {
        Result result = it.next();
        String temp = new String(result.getValue(family.getBytes(), column.getBytes()));
        double value = Double.parseDouble(temp);
        // TODO: The Pig store function does not honor the HBase timestamp,
        // so the timestamp is parsed out of the row key instead.
        String buf = new String(result.getRow());
        long timestamp = Long.parseLong(buf.split("-")[0]);
        // If the Pig store function honored the HBase timestamp, the
        // following line would be preferable:
        // series.add(result.getCellValue().getTimestamp(), value);
        series.add(timestamp, value);
      }
      results.close();
      table.close();
    } catch(Exception e) {
      log.error(ExceptionUtil.getStackTrace(e));
    }
    return series;
  }
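
For context, here is a hedged sketch of calling this helper directly. The table, family, column, and row-key values are hypothetical, and a configured HBase cluster is assumed:

// Hypothetical call site: fetch the last hour of one metric as a Series.
long end = System.currentTimeMillis();
long start = end - 60L * 60L * 1000L;
Series cpu = ChukwaHBaseStore.getSeries(
    "SystemMetrics",      // HBase table name (hypothetical)
    "mycluster-host01",   // rkey: the row-key suffix after "<timestampMillis>-"
    "cpu",                // column family (hypothetical)
    "combined",           // column qualifier (hypothetical)
    start, end,
    true);                // filterByRowKey: require an exact rkey match
System.out.println(cpu.toString());  // a Series renders itself as JSON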


  @Path("series/{table}/{family}/{column}/rowkey/{rkey}")
  @Produces("application/json")
  public String getSeries(@Context HttpServletRequest request, @PathParam("table") String table, @PathParam("family") String family, @PathParam("column") String column, @PathParam("rkey") String rkey, @QueryParam("start") String start, @QueryParam("end") String end) {
    SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
    String buffer = "";
    Series series;
    long startTime = 0;
    long endTime = 0;
    TimeHandler time = new TimeHandler(request);
    try {
      if(start!=null) {
        startTime = sdf.parse(start).getTime();
      } else {
        startTime = time.getStartTime();
      }
      if(end!=null) {
        endTime = sdf.parse(end).getTime();
      } else {
        endTime = time.getEndTime();
      }
      if(rkey!=null) {
        series = ChukwaHBaseStore.getSeries(table, rkey, family, column, startTime, endTime, true);
        buffer = series.toString();
      } else {
        throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
            .entity("No row key defined.").build());
      }
    } catch (ParseException e) {
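
A hedged sketch of invoking this endpoint over HTTP follows; the host, port, and base path of the deployed resource are assumptions that depend on how HICC is mounted in your installation:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

// Hypothetical client for the series endpoint above. The URL follows the
// @Path template; the http://localhost:4080/hicc/v1/metrics prefix is an
// assumption, not taken from the original listing.
public class SeriesClient {
  public static void main(String[] args) throws Exception {
    URL url = new URL("http://localhost:4080/hicc/v1/metrics/"
        + "series/SystemMetrics/cpu/combined/rowkey/mycluster-host01"
        + "?start=20180101000000&end=20180102000000");  // yyyyMMddHHmmss
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty("Accept", "application/json");
    try (BufferedReader in = new BufferedReader(
        new InputStreamReader(conn.getInputStream(), "UTF-8"))) {
      String line;
      while ((line = in.readLine()) != null) {
        System.out.println(line);  // the JSON rendering of the Series
      }
    }
  }
}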

      // Variant endpoint body: the HTTP session attribute named by skey holds
      // a comma-separated list of row keys; one Series is fetched per key and
      // the results are returned as a JSON array.
      if(skey!=null) {
        HttpSession session = request.getSession();
        String[] rkeys = session.getAttribute(skey).toString().split(",");
        JSONArray seriesList = new JSONArray();
        for(String rowKey : rkeys) {
          Series output = ChukwaHBaseStore.getSeries(table, rowKey, family, qualifier, startTime, endTime, true);
          seriesList.add(output.toJSONObject());
        }
        buffer = seriesList.toString();
      } else {
        throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
            .entity("No session attribute key defined.").build());
      }

  @Path("series/{table}/{family}/{column}/rowkey/{rkey}")
  @Produces("application/json")
  public String getSeries(@Context HttpServletRequest request, @PathParam("table") String table, @PathParam("family") String family, @PathParam("column") String column, @PathParam("rkey") String rkey, @QueryParam("start") String start, @QueryParam("end") String end) {
    SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
    String buffer = "";
    Series series;
    long startTime = 0;
    long endTime = 0;
    TimeHandler time = new TimeHandler(request);
    try {
      if(start!=null) {
        startTime = sdf.parse(start).getTime();
      } else {
        startTime = time.getStartTime();
      }
      if(end!=null) {
        endTime = sdf.parse(end).getTime();
      } else {
        endTime = time.getEndTime();
      }
      if(rkey!=null) {
        series = ChukwaHBaseStore.getSeries(table, rkey, family, column, startTime, endTime, true);
        buffer = series.toString();
      } else {
        throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
            .entity("No row key defined.").build());
      }
    } catch (ParseException e) {
View Full Code Here

          // Same session-keyed listing as above, with a guard that skips null
          // or empty entries before issuing an HBase scan.
          JSONArray seriesList = new JSONArray();
          for(String rowKey : rkeys) {
            if (rowKey == null || rowKey.equals("")) {
              continue;
            }
            Series output = ChukwaHBaseStore.getSeries(table, rowKey, family, qualifier, startTime, endTime, true);
            seriesList.add(output.toJSONObject());
          }
          buffer = seriesList.toString();
      } else {
        throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
            .entity("No session attribute key defined.").build());
      }
