Package org.apache.hadoop.hive.ql.exec.vector

Examples of org.apache.hadoop.hive.ql.exec.vector.LongColumnVector
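LongColumnVector is the column type Hive's vectorized execution engine uses for integer-family and boolean values (and, as the nanosecond helper further down shows, for timestamps in this code base): the values live in a public long[] vector, per-row null flags in isNull, and a noNulls shortcut flag, and the vector is installed into one of the cols slots of a VectorizedRowBatch. The excerpts below come from Hive's vectorization test suite. As a minimal sketch of the setup pattern they share (the helper name makeExampleBatch, the loop values, and the usual org.apache.hadoop.hive.ql.exec.vector imports are assumptions, not part of the excerpts):

  // Sketch: build a two-column batch, fill column 0 with longs, mark one row null.
  private VectorizedRowBatch makeExampleBatch(int size) {
    VectorizedRowBatch batch = new VectorizedRowBatch(2, size);
    LongColumnVector in = new LongColumnVector(size);
    for (int i = 0; i < size; i++) {
      in.vector[i] = i;                            // the column values themselves
    }
    in.isNull[0] = true;                           // per-row null flag
    in.noNulls = false;                            // must be cleared once any row is null
    batch.cols[0] = in;                            // input column
    batch.cols[1] = new LongColumnVector(size);    // output column for an expression
    batch.size = size;                             // number of valid rows in the batch
    return batch;
  }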


    // JUnit's double comparison needs an explicit tolerance
    Assert.assertEquals(1.5, resultV.vector[0], Double.MIN_VALUE);
    Assert.assertEquals(0.5, resultV.vector[4], Double.MIN_VALUE);

    // test long->long version
    b = getVectorizedRowBatchLongInLongOut();
    LongColumnVector resultVLong = (LongColumnVector) b.cols[1];
    b.cols[0].noNulls = true;
    expr = new FuncAbsLongToLong(0, 1);
    expr.evaluate(b);
    Assert.assertEquals(2, resultVLong.vector[0]);
    Assert.assertEquals(2, resultVLong.vector[1]);


    return ArrayUtils.toPrimitive(indices);
  }

  // Builds a two-column batch: column 0 holds random long inputs, column 1 is the output column.
  private VectorizedRowBatch getVectorizedRandomRowBatch(int seed, int size) {
    VectorizedRowBatch batch = new VectorizedRowBatch(2, size);
    LongColumnVector lcv = new LongColumnVector(size);
    Random rand = new Random(seed);
    for (int i = 0; i < size; i++) {
      lcv.vector[i] = (rand.nextInt());
    }
    batch.cols[0] = lcv;
    batch.cols[1] = new LongColumnVector(size);
    batch.size = size;
    return batch;
  }

  /*
   * The input array values are repeated to fill the entire vectorized row batch.
   */
  private VectorizedRowBatch getVectorizedRowBatch(int[] inputs, int size) {
    VectorizedRowBatch batch = new VectorizedRowBatch(2, size);
    LongColumnVector lcv = new LongColumnVector(size);
    for (int i = 0; i < size; i++) {
      lcv.vector[i] = inputs[i % inputs.length];
    }
    batch.cols[0] = lcv;
    batch.cols[1] = new LongColumnVector(size);
    batch.size = size;
    return batch;
  }

    return ArrayUtils.toPrimitive(indices);
  }

  private VectorizedRowBatch getVectorizedRandomRowBatchLong2(int seed, int size) {
    VectorizedRowBatch batch = new VectorizedRowBatch(2, size);
    LongColumnVector lcv = new LongColumnVector(size);
    Random rand = new Random(seed);
    for (int i = 0; i < size; i++) {
      /* all 32-bit values qualify; scale up in long arithmetic to get nanoseconds */
      lcv.vector[i] = 1000L * 1000 * 1000 * rand.nextInt();
    }
    batch.cols[0] = lcv;
    batch.cols[1] = new LongColumnVector(size);
    batch.size = size;
    return batch;
  }

      bcv.vector[i] = encoded;
      bcv.start[i] = 0;
      bcv.length[i] = encoded.length;
    }
    batch.cols[0] = bcv;
    batch.cols[1] = new LongColumnVector(size);
    batch.size = size;
    return batch;
  }

  /*
   * The input array values are repeated to fill the entire vectorized row batch.
   */
  private VectorizedRowBatch getVectorizedRowBatchLong2(long[] inputs, int size) {
    VectorizedRowBatch batch = new VectorizedRowBatch(2, size);
    LongColumnVector lcv = new LongColumnVector(size);
    for (int i = 0; i < size; i++) {
      lcv.vector[i] = inputs[i % inputs.length];
    }
    batch.cols[0] = lcv;
    batch.cols[1] = new LongColumnVector(size);
    batch.size = size;
    return batch;
  }

    bcv.vector[0] = vector;
    bcv.start[0] = start;
    bcv.length[0] = length;

    batch.cols[0] = bcv;
    batch.cols[1] = new LongColumnVector(1);
    batch.size = 1;
    return batch;
  }

    testVectorUDFYear(TestType.STRING_LONG);

    // The sliced input string is not a valid date, so the year UDF must mark the output row
    // null while the input column's own null flag stays false.
    VectorizedRowBatch batch = getVectorizedRowBatchStringLong(new byte[] {'2', '2', '0', '1', '3'}, 1, 3);
    VectorExpression udf = new VectorUDFYearString(0, 1);
    udf.evaluate(batch);
    LongColumnVector lcv = (LongColumnVector) batch.cols[1];
    Assert.assertEquals(false, batch.cols[0].isNull[0]);
    Assert.assertEquals(true, lcv.isNull[0]);
  }

      }
    }
  } 

  private void testWriterLong(TypeInfo type) throws HiveException {
    LongColumnVector lcv = VectorizedRowGroupGenUtil.generateLongColumnVector(true, false,
        vectorSize, new Random(10));
    lcv.isNull[3] = true;
    VectorExpressionWriter vew = getWriter(type);
    for (int i = 0; i < vectorSize; i++) {
      Writable w = (Writable) vew.writeValue(lcv, i);
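
Reading a LongColumnVector back out follows the mirror pattern: a slot in vector is only meaningful when noNulls is set or the row's isNull flag is clear. A minimal sketch of that read-side check (the helper name sumNonNull is illustrative only, and the repeating-value fast path is ignored):

  // Sketch: sum the non-null entries of a LongColumnVector, honoring its null flags.
  private long sumNonNull(LongColumnVector lcv, int size) {
    long total = 0;
    for (int i = 0; i < size; i++) {
      if (lcv.noNulls || !lcv.isNull[i]) {   // the slot is valid only for non-null rows
        total += lcv.vector[i];
      }
    }
    return total;
  }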
