Package org.apache.hadoop.hive.ql.exec.vector

Examples of org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector
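
The conditional-expression tests below all build their input with a helper, getBatch1Long3DoubleVectors(), that is not shown on this page. The class below is a minimal, illustrative stand-in with the same shape — one long column used as the IF condition plus three double columns — with values chosen to be consistent with the assertions that follow; it is a sketch, not the helper's actual code.

import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

public class BatchSketch {
  // Illustrative stand-in for getBatch1Long3DoubleVectors(): a 4-row batch with
  // a long condition column (0 = false, non-zero = true) and three double columns.
  static VectorizedRowBatch getBatch1Long3DoubleVectors() {
    VectorizedRowBatch batch = new VectorizedRowBatch(4);
    LongColumnVector cond = new LongColumnVector();
    DoubleColumnVector arg1 = new DoubleColumnVector();
    DoubleColumnVector arg2 = new DoubleColumnVector();
    DoubleColumnVector output = new DoubleColumnVector();
    batch.cols[0] = cond;
    batch.cols[1] = arg1;
    batch.cols[2] = arg2;
    batch.cols[3] = output;
    batch.size = 4;
    for (int i = 0; i < 4; i++) {
      cond.vector[i] = (i < 2) ? 0 : 1;   // rows 0,1 false; rows 2,3 true
      arg1.vector[i] = -(i + 1);          // -1, -2, -3, -4
      arg2.vector[i] = i + 1;             //  1,  2,  3,  4
    }
    return batch;
  }
}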


  @Test
  public void testDoubleScalarScalarIfExpr() {
    VectorizedRowBatch batch = getBatch1Long3DoubleVectors();
    VectorExpression expr = new IfExprDoubleScalarDoubleScalar(0, 100.0d, 200.0d, 3);
    DoubleColumnVector r = (DoubleColumnVector) batch.cols[3];
    expr.evaluate(batch);
    assertEquals(true, 200d == r.vector[0]);
    assertEquals(true, 200d == r.vector[1]);
    assertEquals(true, 100d == r.vector[2]);
    assertEquals(true, 100d == r.vector[3]);
  }


  @Test
  public void testDoubleScalarColumnIfExpr() {
    VectorizedRowBatch batch = getBatch1Long3DoubleVectors();
    VectorExpression expr = new IfExprDoubleScalarDoubleColumn(0, 100.0d, 2, 3);
    DoubleColumnVector r = (DoubleColumnVector) batch.cols[3];
    expr.evaluate(batch);
    assertEquals(true, 1d == r.vector[0]);
    assertEquals(true, 2d == r.vector[1]);
    assertEquals(true, 100d == r.vector[2]);
    assertEquals(true, 100d == r.vector[3]);
  }

  @Test
  public void testDoubleColumnScalarIfExpr() {
    VectorizedRowBatch batch = getBatch1Long3DoubleVectors();
    VectorExpression expr = new IfExprDoubleColumnDoubleScalar(0, 1, 200d, 3);
    DoubleColumnVector r = (DoubleColumnVector) batch.cols[3];
    expr.evaluate(batch);
    assertEquals(true, 200d == r.vector[0]);
    assertEquals(true, 200d == r.vector[1]);
    assertEquals(true, -3d == r.vector[2]);
    assertEquals(true, -4d == r.vector[3]);
  }

            // (tail of the preceding branch, which assigns byte[] data into a BytesColumnVector)
            bcv.length[row] = bytes.length;
          }
        };
      } else if (types[i].equalsIgnoreCase("double") ||
          types[i].equalsIgnoreCase("float")) {
        batch.cols[i] = new DoubleColumnVector(batchSize);
        columnAssign[i] = new ColumnVectorAssign() {
          @Override
          public void assign(
              ColumnVector columnVector,
              int row,
              Object value) {
            DoubleColumnVector dcv = (DoubleColumnVector) columnVector;
            dcv.vector[row] = Double.valueOf(value.toString());
          }
        };
      } else if (types[i].toLowerCase().startsWith("decimal")) {
        Pattern decimalPattern = Pattern.compile(
            "decimal(?:\\((\\d+)(?:,(\\d+))?\\))?", Pattern.CASE_INSENSITIVE);
        Matcher mr = decimalPattern.matcher(types[i]);
        int precision = 38;
        int scale = 0;
        if (mr.matches()) {
          String typePrecision = mr.group(1);
          if (typePrecision != null) {
            precision = Integer.parseInt(typePrecision);
          }
          String typeScale = mr.group(2);
          if (typeScale != null) {
            scale = Integer.parseInt(typeScale);
          }
        }

        batch.cols[i] = new DecimalColumnVector(batchSize, precision, scale);
        columnAssign[i] = new ColumnVectorAssign() {
          @Override
          public void assign(
              ColumnVector columnVector,
              int row,
              Object value) {
            DecimalColumnVector dcv = (DecimalColumnVector) columnVector;
            dcv.set(row, (HiveDecimal) value);
          }
        };
      } else {
        throw new HiveException("Unimplemented type " + types[i]);
      }
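
The assigners built above are what feed raw row values into the batch. The following is a minimal usage sketch, assuming a ColumnVectorAssign interface with the assign(ColumnVector, int, Object) shape shown in the snippet; the interface definition and the fillBatch helper here are illustrative, not part of the original class.

import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;

// Assumed shape of the per-column assigner used by the type dispatch above.
interface ColumnVectorAssign {
  void assign(ColumnVector columnVector, int row, Object value);
}

class BatchFiller {
  // Copy already-parsed row values into the batch through the per-column assigners.
  static void fillBatch(VectorizedRowBatch batch, ColumnVectorAssign[] columnAssign,
      Object[][] rows) {
    for (int row = 0; row < rows.length; row++) {
      for (int col = 0; col < columnAssign.length; col++) {
        columnAssign[col].assign(batch.cols[col], row, rows[row][col]);
      }
    }
    batch.size = rows.length;
  }
}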

    if (childExpressions != null) {
      super.evaluateChildren(batch);
    }

    LongColumnVector inputColVector1 = (LongColumnVector) batch.cols[colNum1];
    LongColumnVector inputColVector2 = (LongColumnVector) batch.cols[colNum2];
    DoubleColumnVector outputColVector = (DoubleColumnVector) batch.cols[outputColumn];
    int[] sel = batch.selected;
    int n = batch.size;
    long[] vector1 = inputColVector1.vector;
    long[] vector2 = inputColVector2.vector;
    double[] outputVector = outputColVector.vector;
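
Such an expression's evaluate body typically continues by walking either the selected subset or the first n rows. The loop below is a sketch of that generic pattern only — the null and isRepeating fast paths handled by the real Hive templates are omitted, and division stands in for whatever arithmetic this particular class implements.

    if (batch.selectedInUse) {
      for (int j = 0; j != n; j++) {
        int i = sel[j];
        outputVector[i] = (double) vector1[i] / vector2[i];
      }
    } else {
      for (int i = 0; i != n; i++) {
        outputVector[i] = (double) vector1[i] / vector2[i];
      }
    }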

    if (childExpressions != null) {
      super.evaluateChildren(batch);
    }

    DoubleColumnVector inV = (DoubleColumnVector) batch.cols[inputColumn];
    int[] sel = batch.selected;
    int n = batch.size;
    DecimalColumnVector outV = (DecimalColumnVector) batch.cols[outputColumn];

    if (n == 0) {
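
From here the method typically returns early for an empty batch and then converts each active double into the output decimal column. The lines below are a sketch of that continuation (null and isRepeating handling omitted), reusing only the outV.set(row, HiveDecimal) call shown earlier on this page; they are not necessarily this exact class's code.

      // Nothing to compute for an empty batch.
      return;
    }

    if (batch.selectedInUse) {
      for (int j = 0; j != n; j++) {
        int i = sel[j];
        outV.set(i, HiveDecimal.create(java.math.BigDecimal.valueOf(inV.vector[i])));
      }
    } else {
      for (int i = 0; i != n; i++) {
        outV.set(i, HiveDecimal.create(java.math.BigDecimal.valueOf(inV.vector[i])));
      }
    }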

    // Spot check only. null & repeating behavior are checked elsewhere for the same template.
    int seed = 17;
    VectorizedRowBatch vrb = VectorizedRowGroupGenUtil.getVectorizedRowBatch(
        5, 2, seed);
    DoubleColumnVector dcv0 = new DoubleColumnVector();
    vrb.cols[0] = dcv0;

    //Basic case
    dcv0.vector[0] = 5;
    dcv0.vector[1] = 20;

    // Spot check only. null & repeating behavior are checked elsewhere for the same template.
    int seed = 17;
    VectorizedRowBatch vrb = VectorizedRowGroupGenUtil.getVectorizedRowBatch(
        5, 2, seed);
    vrb.cols[0] = new DoubleColumnVector();
    DoubleColumnVector dcv = (DoubleColumnVector) vrb.cols[0];

    //Basic case
    dcv.vector[0] = 5;
    dcv.vector[1] = 20;
    dcv.vector[2] = 17;

  @Test
  public void testFilterDoubleIn() {
    int seed = 17;
    VectorizedRowBatch vrb = VectorizedRowGroupGenUtil.getVectorizedRowBatch(
        5, 2, seed);
    DoubleColumnVector dcv0 = new DoubleColumnVector();
    vrb.cols[0] = dcv0;
    double[] inList = {5.0, 20.2};
    FilterDoubleColumnInList f = new FilterDoubleColumnInList(0);
    f.setInListValues(inList);
    VectorExpression expr1 = f;
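
A filter expression reports its result through the batch's selection vector rather than through an output column. The continuation below is a sketch only: the row values, the batch size, and the assumption that the generated batch starts with selectedInUse == false are all illustrative, since the generated batch contents are not shown here.

    // Rows 0 and 1 hit the IN list, row 2 does not.
    dcv0.vector[0] = 5.0;
    dcv0.vector[1] = 20.2;
    dcv0.vector[2] = 17.0;
    vrb.size = 3;

    expr1.evaluate(vrb);

    // Surviving row indices are published via batch.selected.
    assertEquals(2, vrb.size);
    assertEquals(true, vrb.selectedInUse);
    assertEquals(0, vrb.selected[0]);
    assertEquals(1, vrb.selected[1]);
  }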

    // (end of a preceding helper method, shown only partially)
    }
    return null;
  }

  private void testWriterDouble(TypeInfo type) throws HiveException {
    DoubleColumnVector dcv = VectorizedRowGroupGenUtil.generateDoubleColumnVector(true, false,
        this.vectorSize, new Random(10));
    dcv.isNull[2] = true;
    VectorExpressionWriter vew = getWriter(type);
    for (int i = 0; i < vectorSize; i++) {
      Writable w = (Writable) vew.writeValue(dcv, i);
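
Inside the loop the writer's output is usually checked against the source vector. The continuation below is a sketch that only verifies the null contract, since the exact Writable comparison depends on the TypeInfo being tested.

      if (w == null) {
        // A null writable should only be produced for rows marked null in the vector.
        assertEquals(true, dcv.isNull[i]);
      } else {
        // Non-null rows should produce a writable wrapping dcv.vector[i].
        assertEquals(false, dcv.isNull[i]);
      }
    }
  }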
