Package org.apache.drill.exec.record

Examples of org.apache.drill.exec.record.RecordBatchLoader
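Every snippet below follows the same basic life cycle: construct the loader with a BufferAllocator, feed it a batch definition (getDef()) plus the batch's backing buffer via load(), iterate the resulting VectorWrappers to read values through their accessors, then clear() the loader (and release the source batch) to free the buffers. A minimal sketch of that pattern, distilled from the examples below; the allocator and the received QueryResultBatch named batch stand in for whatever the surrounding code provides:

    RecordBatchLoader loader = new RecordBatchLoader(allocator);

    // load() returns true when the incoming batch's schema differs from the previous one.
    if (loader.load(batch.getHeader().getDef(), batch.getData())) {
      System.out.println("=== new schema ===");
    }

    // Iterate the loaded vectors and read each value through its accessor.
    for (VectorWrapper<?> wrapper : loader) {
      ValueVector.Accessor accessor = wrapper.getValueVector().getAccessor();
      for (int r = 0; r < loader.getRecordCount(); r++) {
        System.out.println(wrapper.getField().toExpr() + " = " + accessor.getObject(r));
      }
    }

    // Release the loader's buffers (and the batch's) when done.
    loader.clear();
    batch.release();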


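    // (excerpt) Earlier lines build a list "vectors" of test ValueVectors; each vector v is allocated and filled with test data: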
      AllocationHelper.allocate(v, 100, 50);
      v.getMutator().generateTestData(100);
    }

    WritableBatch writableBatch = WritableBatch.getBatchNoHV(100, vectors, false);
    RecordBatchLoader batchLoader = new RecordBatchLoader(allocator);
    ByteBuf[] byteBufs = writableBatch.getBuffers();
    int bytes = 0;
    for (int i = 0; i < byteBufs.length; i++) {
      bytes += byteBufs[i].writerIndex();
    }
    DrillBuf byteBuf = allocator.buffer(bytes);
    int index = 0;
    for (int i = 0; i < byteBufs.length; i++) {
      byteBufs[i].readBytes(byteBuf, index, byteBufs[i].writerIndex());
      index += byteBufs[i].writerIndex();
    }
    byteBuf.writerIndex(bytes);

    batchLoader.load(writableBatch.getDef(), byteBuf);
    boolean firstColumn = true;
    int recordCount = 0;
    for (VectorWrapper<?> v : batchLoader) {
      if (firstColumn) {
        firstColumn = false;
      } else {
        System.out.print("\t");
      }
      System.out.print(v.getField().toExpr());
      System.out.print("[");
      System.out.print(v.getField().getType().getMinorType());
      System.out.print("]");
    }

    System.out.println();
    for (int r = 0; r < batchLoader.getRecordCount(); r++) {
      boolean first = true;
      recordCount++;
      for (VectorWrapper<?> v : batchLoader) {
        if (first) {
          first = false;
        } else {
          System.out.print("\t");
        }
        ValueVector.Accessor accessor = v.getValueVector().getAccessor();
        if (v.getField().getType().getMinorType() == TypeProtos.MinorType.VARCHAR) {
          Object obj = accessor.getObject(r);
          if (obj != null) {
            System.out.print(obj);
          } else {
            System.out.print("NULL");
          }
        } else {
          System.out.print(accessor.getObject(r));
        }
      }
      if (!first) System.out.println();
    }
    assertEquals(100, recordCount);
    batchLoader.clear();
    writableBatch.clear();
  }
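Note that load() takes the batch definition plus a single buffer, which is why the snippet first copies the WritableBatch's individual buffers into one DrillBuf of the combined size before loading.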


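    // (excerpt) From a Hive GenericUDF test: run the physical plan and check each UDF output column per record.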
    int numRecords = 0;
    String planString = Resources.toString(Resources.getResource("functions/hive/GenericUDF.json"), Charsets.UTF_8);
    List<QueryResultBatch> results = testPhysicalWithResults(planString);

    RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
    for (QueryResultBatch result : results) {
      batchLoader.load(result.getHeader().getDef(), result.getData());
      if (batchLoader.getRecordCount() <= 0) {
        result.release();
        batchLoader.clear();
        continue;
      }
      // Output columns and types
      //  1. str1 : Var16Char
      //  2. upperStr1 : NullableVar16Char
      //  3. unix_timestamp : NullableBigInt
      //  4. concat : NullableVar16Char
      //  5. flt1 : Float4
      //  6. format_number : NullableVar16Char
      //  7. nullableStr1 : NullableVar16Char
      //  8. upperNullableStr1 : NullableVar16Char
      Var16CharVector str1V = (Var16CharVector) batchLoader.getValueAccessorById(Var16CharVector.class, 0).getValueVector();
      NullableVar16CharVector upperStr1V = (NullableVar16CharVector) batchLoader.getValueAccessorById(NullableVar16CharVector.class, 1).getValueVector();
      NullableBigIntVector unix_timestampV = (NullableBigIntVector) batchLoader.getValueAccessorById(NullableBigIntVector.class, 2).getValueVector();
      NullableVar16CharVector concatV = (NullableVar16CharVector) batchLoader.getValueAccessorById(NullableVar16CharVector.class, 3).getValueVector();
      Float4Vector flt1V = (Float4Vector) batchLoader.getValueAccessorById(Float4Vector.class, 4).getValueVector();
      NullableVar16CharVector format_numberV = (NullableVar16CharVector) batchLoader.getValueAccessorById(NullableVar16CharVector.class, 5).getValueVector();
      NullableVar16CharVector nullableStr1V = (NullableVar16CharVector) batchLoader.getValueAccessorById(NullableVar16CharVector.class, 6).getValueVector();
      NullableVar16CharVector upperNullableStr1V = (NullableVar16CharVector) batchLoader.getValueAccessorById(NullableVar16CharVector.class, 7).getValueVector();

      for (int i=0; i<batchLoader.getRecordCount(); i++) {
        String in = new String(str1V.getAccessor().get(i), Charsets.UTF_16);
        String upper = new String(upperStr1V.getAccessor().get(i), Charsets.UTF_16);
        assertEquals(in.toUpperCase(), upper);

        long unix_timestamp = unix_timestampV.getAccessor().get(i);

        String concat = new String(concatV.getAccessor().get(i), Charsets.UTF_16);
        assertEquals(in + "-" + in, concat);

        float flt1 = flt1V.getAccessor().get(i);
        String format_number = new String(format_numberV.getAccessor().get(i), Charsets.UTF_16);


        String nullableStr1 = null;
        if (!nullableStr1V.getAccessor().isNull(i)) {
          nullableStr1 = new String(nullableStr1V.getAccessor().get(i), Charsets.UTF_16);
        }

        String upperNullableStr1 = null;
        if (!upperNullableStr1V.getAccessor().isNull(i)) {
          upperNullableStr1 = new String(upperNullableStr1V.getAccessor().get(i), Charsets.UTF_16);
        }

        assertEquals(nullableStr1 != null, upperNullableStr1 != null);
        if (nullableStr1 != null) {
          assertEquals(nullableStr1.toUpperCase(), upperNullableStr1);
        }

        System.out.println(in + ", " + upper + ", " + unix_timestamp + ", " + concat + ", " +
          flt1 + ", " + format_number + ", " + nullableStr1 + ", " + upperNullableStr1);

        numRecords++;
      }

      result.release();
      batchLoader.clear();
    }

    System.out.println("Processed " + numRecords + " records");
  }

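  // From a Hive UDF test: checks str1Length against String.length() and pow against Math.pow().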
  public void testUDF() throws Throwable {
    int numRecords = 0;
    String planString = Resources.toString(Resources.getResource("functions/hive/UDF.json"), Charsets.UTF_8);
    List<QueryResultBatch> results = testPhysicalWithResults(planString);

    RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
    for (QueryResultBatch result : results) {
      batchLoader.load(result.getHeader().getDef(), result.getData());
      if (batchLoader.getRecordCount() <= 0) {
        result.release();
        batchLoader.clear();
        continue;
      }

      // Output columns and types
      // 1. str1 : Var16Char
      // 2. str1Length : NullableInt
      // 3. str1Ascii : NullableInt
      // 4. flt1 : Float4
      // 5. pow : NullableFloat8
      Var16CharVector str1V = (Var16CharVector) batchLoader.getValueAccessorById(Var16CharVector.class, 0).getValueVector();
      NullableIntVector str1LengthV = (NullableIntVector) batchLoader.getValueAccessorById(NullableIntVector.class, 1).getValueVector();
      NullableIntVector str1AsciiV = (NullableIntVector) batchLoader.getValueAccessorById(NullableIntVector.class, 2).getValueVector();
      Float4Vector flt1V = (Float4Vector) batchLoader.getValueAccessorById(Float4Vector.class, 3).getValueVector();
      NullableFloat8Vector powV = (NullableFloat8Vector) batchLoader.getValueAccessorById(NullableFloat8Vector.class, 4).getValueVector();

      for (int i=0; i<batchLoader.getRecordCount(); i++) {
        String str1 = new String(str1V.getAccessor().get(i), Charsets.UTF_16);
        int str1Length = str1LengthV.getAccessor().get(i);
        assertEquals(str1.length(), str1Length);

        int str1Ascii = str1AsciiV.getAccessor().get(i);

        float flt1 = flt1V.getAccessor().get(i);

        double pow = 0;
        if (!powV.getAccessor().isNull(i)) {
          pow = powV.getAccessor().get(i);
          // Compare doubles with a tolerance rather than ==.
          assertEquals(Math.pow(flt1, 2.0), pow, 1e-9);
        }

        System.out.println(str1 + ", " + str1Length + ", " + str1Ascii + ", " + flt1 + ", " + pow);
        numRecords++;
      }

      result.release();
      batchLoader.clear();
    }

    System.out.println("Processed " + numRecords + " records");
  }

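  // (excerpt) From the Drill JDBC driver: a result-set constructor creates the loader from the client's allocator.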
      ResultSetMetaData resultSetMetaData, TimeZone timeZone) {
    super(statement, prepareResult, resultSetMetaData, timeZone);
    DrillConnection c = (DrillConnection) statement.getConnection();
    DrillClient client = c.getClient();
    currentBatch = new RecordBatchLoader(client.getAllocator());
    this.client = client;
    cursor = new DrillCursor(this);
  }

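    // (excerpt) Run a physical plan from a classpath resource, print every batch, and flag schema changes.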
    String path = "/physical_test2.json";
//      String path = "/filter/test1.json";
    List<QueryResultBatch> results = client.runQuery(QueryType.PHYSICAL, Files.toString(FileUtils.getResourceAsFile(path), Charsets.UTF_8));

    // look at records
    RecordBatchLoader batchLoader = new RecordBatchLoader(client.getAllocator());
    int recordCount = 0;
    for (QueryResultBatch batch : results) {
      boolean schemaChanged = batchLoader.load(batch.getHeader().getDef(), batch.getData());
      boolean firstColumn = true;

      // print headers.
      if (schemaChanged) {
        System.out.println("\n\n========NEW SCHEMA=========\n\n");
        for (VectorWrapper<?> value : batchLoader) {

          if (firstColumn) {
            firstColumn = false;
          } else {
            System.out.print("\t");
          }
          System.out.print(value.getField().toExpr());
          System.out.print("[");
          System.out.print(value.getField().getType().getMinorType());
          System.out.print("]");
        }
        System.out.println();
      }

      for (int i = 0; i < batchLoader.getRecordCount(); i++) {
        boolean first = true;
        recordCount++;
        for (VectorWrapper<?> value : batchLoader) {
          if (first) {
            first = false;
          } else {
            System.out.print("\t");
          }
          System.out.print(value.getValueVector().getAccessor().getObject(i));
        }
        if (!first) {
          System.out.println();
        }
      }
      batchLoader.clear();
      batch.release();
    }
    logger.debug("Received results {}", results);
    assertEquals(200, recordCount);
  }

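      // (excerpt) Tail of a client.runQuery(...) call: the plan is read from a resource and the
      // #{TEST_FILE} placeholder is replaced with the URI of the test JSON file.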
          Files.toString(FileUtils.getResourceAsFile("/physical_json_scan_test1.json"), Charsets.UTF_8)
              .replace("#{TEST_FILE}", FileUtils.getResourceAsFile("/scan_json_test_1.json").toURI().toString())
      );

      // look at records
      RecordBatchLoader batchLoader = new RecordBatchLoader(new TopLevelAllocator(CONFIG));
      int recordCount = 0;

      //int expectedBatchCount = 2;

      //assertEquals(expectedBatchCount, results.size());

      for (int i = 0; i < results.size(); ++i) {
        QueryResultBatch batch = results.get(i);
        if (i == 1) {
          assertTrue(batch.hasData());
        } else {
          assertFalse(batch.hasData());
          batch.release();
          continue;
        }

        assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
        boolean firstColumn = true;

        // print headers.
        System.out.println("\n\n========NEW SCHEMA=========\n\n");
        for (VectorWrapper<?> v : batchLoader) {

          if (firstColumn) {
            firstColumn = false;
          } else {
            System.out.print("\t");
          }
          System.out.print(v.getField().toExpr());
          System.out.print("[");
          System.out.print(v.getField().getType().getMinorType());
          System.out.print("]");
        }

        System.out.println();


        for (int r = 0; r < batchLoader.getRecordCount(); r++) {
          boolean first = true;
          recordCount++;
          for (VectorWrapper<?> v : batchLoader) {
            if (first) {
              first = false;
            } else {
              System.out.print("\t");
            }

            ValueVector.Accessor accessor = v.getValueVector().getAccessor();
            System.out.print(accessor.getObject(r));
          }
          if (!first) {
            System.out.println();
          }
        }
        batchLoader.clear();
        batch.release();
      }

      assertEquals(2, recordCount);
    }

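      // (excerpt) Start an embedded drillbit, run a two-way implicit-cast plan, and verify both output vectors.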
      bit.run();
      client.connect();
      List<QueryResultBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
          Files.toString(FileUtils.getResourceAsFile("/functions/cast/two_way_implicit_cast.json"), Charsets.UTF_8));

      RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());

      QueryResultBatch batch = results.get(0);
      assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));

      Iterator<VectorWrapper<?>> itr = batchLoader.iterator();

      ValueVector.Accessor intAccessor1 = itr.next().getValueVector().getAccessor();
      ValueVector.Accessor varcharAccessor1 = itr.next().getValueVector().getAccessor();

      for (int i = 0; i < intAccessor1.getValueCount(); i++) {
        System.out.println(intAccessor1.getObject(i));
        assertEquals(10, intAccessor1.getObject(i));
        System.out.println(varcharAccessor1.getObject(i));
        assertEquals("101", varcharAccessor1.getObject(i).toString());
      }

      batchLoader.clear();
      for (QueryResultBatch result : results) {
        result.release();
      }
    }
  }

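    // (excerpt) Tail of the column list selected from the TPC-H lineitem table; the results are materialized for comparison.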
        "L_RETURNFLAG, L_LINESTATUS, L_SHIPDATE,COMMITDATE ,RECEIPTDATE, L_SHIPINSTRUCT, L_SHIPMODE, L_COMMENT";
    String inputTable = "cp.`tpch/lineitem.parquet`";
    String query = String.format("SELECT %s FROM %s", selection, inputTable);
    List<QueryResultBatch> expected = testSqlWithResults(query);
    BatchSchema schema = null;
    RecordBatchLoader loader = new RecordBatchLoader(getAllocator());
    List<Map> expectedRecords = new ArrayList<>();
    // Read the data out of the results; the error manifested itself when getObject() was called on vectors that contained dead buffers.
    addToMaterializedResults(expectedRecords, expected, loader, schema);
    for (QueryResultBatch result : expected) {
      result.release();

  public void compareParquetReaders(String selection, String table) throws Exception {
    test("alter system set `store.parquet.use_new_reader` = true");
    List<QueryResultBatch> expected = testSqlWithResults("select " + selection + " from " + table);

    RecordBatchLoader loader = new RecordBatchLoader(getAllocator());
    BatchSchema schema = null;

    List<Map> expectedRecords = new ArrayList<>();
    addToMaterializedResults(expectedRecords, expected, loader, schema);

  public void compareParquetReadersColumnar(String selection, String table) throws Exception {
    test("alter system set `store.parquet.use_new_reader` = true");
    List<QueryResultBatch> expected = testSqlWithResults("select " + selection + " from " + table);

    RecordBatchLoader loader = new RecordBatchLoader(getAllocator());
    BatchSchema schema = null;

    Map<String, List> expectedSuperVectors = addToCombinedVectorResults(expected, loader, schema);

    test("alter system set `store.parquet.use_new_reader` = false");
