Package org.apache.drill.exec.record

Examples of org.apache.drill.exec.record.RecordBatchLoader
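
The snippets below share one basic pattern: run a query through a DrillClient, construct a RecordBatchLoader with a BufferAllocator, load each QueryResultBatch's definition and data buffer, read column values through each ValueVector.Accessor, then clear the loader and release the batches. The following is a minimal sketch of that pattern, assuming a connected client and an allocator are already in scope; the plan resource path is a placeholder, not a file from any one example.

    // Minimal sketch of the usage pattern shared by the examples below.
    // "client", "allocator", and "/some_plan.json" are assumed placeholders.
    List<QueryResultBatch> results = client.runQuery(QueryType.PHYSICAL,
        Files.toString(FileUtils.getResourceAsFile("/some_plan.json"), Charsets.UTF_8));

    RecordBatchLoader batchLoader = new RecordBatchLoader(allocator);
    for (QueryResultBatch batch : results) {
      // load() returns true when the incoming schema differs from the previous batch.
      batchLoader.load(batch.getHeader().getDef(), batch.getData());

      // Each VectorWrapper exposes one column; its Accessor reads individual values.
      for (int r = 0; r < batchLoader.getRecordCount(); r++) {
        for (VectorWrapper<?> w : batchLoader) {
          System.out.print(w.getValueVector().getAccessor().getObject(r) + "\t");
        }
        System.out.println();
      }

      // Free the loader's vectors and the batch's underlying buffers.
      batchLoader.clear();
      batch.release();
    }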


            client.connect();
            List<QueryResultBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
                    Files.toString(FileUtils.getResourceAsFile("/record/vector/test_timestamp.json"), Charsets.UTF_8)
                            .replace("#{TEST_FILE}", "/test_simple_date.json"));

            RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());

            QueryResultBatch batch = results.get(0);
            assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));

            for (VectorWrapper<?> v : batchLoader) {

                ValueVector.Accessor accessor = v.getValueVector().getAccessor();

                assertEquals(accessor.getObject(0).toString(), "1970-01-02 10:20:33.000");
                assertEquals(accessor.getObject(1).toString(), "2008-12-28 11:34:00.129");
                assertEquals(accessor.getObject(2).toString(), "2000-02-27 14:24:00.000");
            }

            batchLoader.clear();
            for(QueryResultBatch b : results){
              b.release();
            }
        }
    }


            client.connect();
            List<QueryResultBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
                    Files.toString(FileUtils.getResourceAsFile("/record/vector/test_interval.json"), Charsets.UTF_8)
                            .replace("#{TEST_FILE}", "/test_simple_interval.json"));

            RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());

            QueryResultBatch batch = results.get(0);
            assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));

            Iterator<VectorWrapper<?>> itr = batchLoader.iterator();

            ValueVector.Accessor accessor = itr.next().getValueVector().getAccessor();

            // Check the interval type
            assertEquals(accessor.getObject(0).toString(), "2 years 2 months 1 day 1:20:35.0");
            assertEquals(accessor.getObject(1).toString(), "2 years 2 months 0 days 0:0:0.0");
            assertEquals(accessor.getObject(2).toString(), "0 years 0 months 0 days 1:20:35.0");
            assertEquals(accessor.getObject(3).toString(), "2 years 2 months 1 day 1:20:35.897");
            assertEquals(accessor.getObject(4).toString(), "0 years 0 months 0 days 0:0:35.4");
            assertEquals(accessor.getObject(5).toString(), "1 year 10 months 1 day 0:-39:-25.0");

            accessor = itr.next().getValueVector().getAccessor();

            // Check the interval year type
            assertEquals(accessor.getObject(0).toString(), "2 years 2 months ");
            assertEquals(accessor.getObject(1).toString(), "2 years 2 months ");
            assertEquals(accessor.getObject(2).toString(), "0 years 0 months ");
            assertEquals(accessor.getObject(3).toString(), "2 years 2 months ");
            assertEquals(accessor.getObject(4).toString(), "0 years 0 months ");
            assertEquals(accessor.getObject(5).toString(), "1 year 10 months ");


            accessor = itr.next().getValueVector().getAccessor();

            // Check the interval day type
            assertEquals(accessor.getObject(0).toString(), "1 day 1:20:35.0");
            assertEquals(accessor.getObject(1).toString(), "0 days 0:0:0.0");
            assertEquals(accessor.getObject(2).toString(), "0 days 1:20:35.0");
            assertEquals(accessor.getObject(3).toString(), "1 day 1:20:35.897");
            assertEquals(accessor.getObject(4).toString(), "0 days 0:0:35.4");
            assertEquals(accessor.getObject(5).toString(), "1 day 0:-39:-25.0");

            batchLoader.clear();
            for(QueryResultBatch b : results){
              b.release();
            }
        }
    }

            client.connect();
            List<QueryResultBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
                    Files.toString(FileUtils.getResourceAsFile("/record/vector/test_all_date_literals.json"), Charsets.UTF_8)
                            .replace("#{TEST_FILE}", "/test_simple_date.json"));

            RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());

            QueryResultBatch batch = results.get(0);
            assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));

            String[] result = {"2008-02-27",
                               "2008-02-27 01:02:03.000",
                               "2008-02-27 01:02:03.000 UTC",
                               "10:11:13.999",
                               "2 years 2 months 3 days 0:1:3.89"};

            int idx = 0;

            for (VectorWrapper<?> v : batchLoader) {

                ValueVector.Accessor accessor = v.getValueVector().getAccessor();

                assertEquals(accessor.getObject(0).toString(), result[idx++]);
            }

            batchLoader.clear();
            for(QueryResultBatch b : results){
              b.release();
            }
        }
    }

            client.connect();
            List<QueryResultBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
                    Files.toString(FileUtils.getResourceAsFile("/record/vector/test_date_add.json"), Charsets.UTF_8)
                            .replace("#{TEST_FILE}", "/test_simple_date.json"));

            RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());

            QueryResultBatch batch = results.get(0);
            assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));

            for (VectorWrapper<?> v : batchLoader) {

                ValueVector.Accessor accessor = v.getValueVector().getAccessor();

                assertEquals(accessor.getObject(0).toString(), "2008-03-27 00:00:00.000");
            }

            batchLoader.clear();
            for(QueryResultBatch b : results){
              b.release();
            }
        }
    }

      AllocationHelper.allocate(v, 100, 50);
      v.getMutator().generateTestData(100);
    }

    WritableBatch writableBatch = WritableBatch.getBatchNoHV(100, vectors, false);
    RecordBatchLoader batchLoader = new RecordBatchLoader(allocator);
    // Coalesce the batch's buffers into a single ByteBuf so the loader can reload them.
    ByteBuf[] byteBufs = writableBatch.getBuffers();
    int bytes = 0;
    for (int i = 0; i < byteBufs.length; i++) {
      bytes += byteBufs[i].writerIndex();
    }
    ByteBuf byteBuf = allocator.buffer(bytes);
    int index = 0;
    for (int i = 0; i < byteBufs.length; i++) {
      byteBufs[i].readBytes(byteBuf, index, byteBufs[i].writerIndex());
      index += byteBufs[i].writerIndex();
    }
    byteBuf.writerIndex(bytes);

    batchLoader.load(writableBatch.getDef(), byteBuf);
    boolean firstColumn = true;
    int recordCount = 0;
    for (VectorWrapper<?> v : batchLoader) {
      if (firstColumn) {
        firstColumn = false;
      } else {
        System.out.print("\t");
      }
      System.out.print(v.getField().toExpr());
      System.out.print("[");
      System.out.print(v.getField().getType().getMinorType());
      System.out.print("]");
    }

    System.out.println();
    for (int r = 0; r < batchLoader.getRecordCount(); r++) {
      boolean first = true;
      recordCount++;
      for (VectorWrapper<?> v : batchLoader) {
        if (first) {
          first = false;
        } else {
          System.out.print("\t");
        }
        ValueVector.Accessor accessor = v.getValueVector().getAccessor();
        if (v.getField().getType().getMinorType() == TypeProtos.MinorType.VARCHAR) {
          Object obj = accessor.getObject(r);
          if (obj != null) {
            System.out.print(obj);
          } else {
            System.out.print("NULL");
          }
        } else {
          System.out.print(accessor.getObject(r));
        }
      }
      if (!first) System.out.println();
    }
    assertEquals(100, recordCount);
    batchLoader.clear();
    writableBatch.clear();
  }

  public UnorderedReceiverBatch(FragmentContext context, RawFragmentBatchProvider fragProvider, UnorderedReceiver config) throws OutOfMemoryException {
    this.fragProvider = fragProvider;
    this.context = context;
    // In the normal case the batchLoader does not require an allocator. However, a splitAndTransfer
    // on a value vector may need an allocator for the new offset vector, so we pass the context's
    // allocator to the batchLoader here.
    this.batchLoader = new RecordBatchLoader(context.getAllocator());

    this.stats = context.getStats().getOperatorStats(new OpProfileDef(config.getOperatorId(), config.getOperatorType(), 1), null);
    this.stats.setLongStat(Metric.NUM_SENDERS, config.getNumSenders());
    this.config = config;
  }

      String path = "/physical_test2.json";
//      String path = "/filter/test1.json";
    List<QueryResultBatch> results = client.runQuery(QueryType.PHYSICAL, Files.toString(FileUtils.getResourceAsFile(path), Charsets.UTF_8));

    // look at records
    RecordBatchLoader batchLoader = new RecordBatchLoader(client.getAllocator());
    int recordCount = 0;
    for (QueryResultBatch batch : results) {

      boolean schemaChanged = batchLoader.load(batch.getHeader().getDef(), batch.getData());
      boolean firstColumn = true;

      // print headers.
      if (schemaChanged) {
        System.out.println("\n\n========NEW SCHEMA=========\n\n");
        for (VectorWrapper<?> value : batchLoader) {

          if (firstColumn) {
            firstColumn = false;
          } else {
            System.out.print("\t");
          }
          System.out.print(value.getField().toExpr());
          System.out.print("[");
          System.out.print(value.getField().getType().getMinorType());
          System.out.print("]");
        }
        System.out.println();
      }

      for (int i = 0; i < batchLoader.getRecordCount(); i++) {
        boolean first = true;
        recordCount++;
        for (VectorWrapper<?> value : batchLoader) {
          if (first) {
            first = false;
          } else {
            System.out.print("\t");
          }
          System.out.print(value.getValueVector().getAccessor().getObject(i));
        }
        if (!first) System.out.println();
      }
      batchLoader.clear();
      batch.release();
    }
    logger.debug("Received results {}", results);
    assertEquals(recordCount, 200);
    }

          Files.toString(FileUtils.getResourceAsFile("/physical_json_scan_test1.json"), Charsets.UTF_8)
              .replace("#{TEST_FILE}", FileUtils.getResourceAsFile("/scan_json_test_1.json").toURI().toString())
      );

      // look at records
      RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
      int recordCount = 0;

      //int expectedBatchCount = 2;

      //assertEquals(expectedBatchCount, results.size());

      for (int i = 0; i < results.size(); ++i) {
        QueryResultBatch batch = results.get(i);
        if (i == 0) {
          assertTrue(batch.hasData());
        } else {
          assertFalse(batch.hasData());
          return;
        }

        assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
        boolean firstColumn = true;

        // print headers.
        System.out.println("\n\n========NEW SCHEMA=========\n\n");
        for (VectorWrapper<?> v : batchLoader) {

          if (firstColumn) {
            firstColumn = false;
          } else {
            System.out.print("\t");
          }
          System.out.print(v.getField().toExpr());
          System.out.print("[");
          System.out.print(v.getField().getType().getMinorType());
          System.out.print("]");
        }

        System.out.println();


        for (int r = 0; r < batchLoader.getRecordCount(); r++) {
          boolean first = true;
          recordCount++;
          for (VectorWrapper<?> v : batchLoader) {
            if (first) {
              first = false;
            } else {
              System.out.print("\t");
            }

            ValueVector.Accessor accessor = v.getValueVector().getAccessor();
            System.out.print(accessor.getObject(r));
          }
          if (!first) System.out.println();
        }
        batchLoader.clear();
        batch.release();
      }

      assertEquals(2, recordCount);
    }

      bit.run();
      client.connect();
      List<QueryResultBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
          Files.toString(FileUtils.getResourceAsFile("/functions/cast/two_way_implicit_cast.json"), Charsets.UTF_8));

      RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());

      QueryResultBatch batch = results.get(0);
      assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));

      Iterator<VectorWrapper<?>> itr = batchLoader.iterator();

      ValueVector.Accessor intAccessor1 = itr.next().getValueVector().getAccessor();
      ValueVector.Accessor varcharAccessor1 = itr.next().getValueVector().getAccessor();

      for (int i = 0; i < intAccessor1.getValueCount(); i++) {
        System.out.println(intAccessor1.getObject(i));
        assertEquals(intAccessor1.getObject(i), 10);
        System.out.println(varcharAccessor1.getObject(i));
        assertEquals(varcharAccessor1.getObject(i).toString(), "101");
      }

      batchLoader.clear();
      for (QueryResultBatch result : results) {
        result.release();
      }
    }
  }

   */
  private String getPlanInString(String sql, String columnName)
      throws Exception {
    List<QueryResultBatch> results = testSqlWithResults(sql);

    RecordBatchLoader loader = new RecordBatchLoader(bit.getContext()
        .getAllocator());
    StringBuilder builder = new StringBuilder();

    for (QueryResultBatch b : results) {
      if (!b.hasData())
        continue;

      loader.load(b.getHeader().getDef(), b.getData());

      VectorWrapper<?> vw = loader.getValueAccessorById(
          NullableVarCharVector.class, //
          loader.getValueVectorId(SchemaPath.getSimplePath(columnName)).getFieldIds() //
          );

      System.out.println(vw.getValueVector().getField().toExpr());
      ValueVector vv = vw.getValueVector();
      for (int i = 0; i < vv.getAccessor().getValueCount(); i++) {
        Object o = vv.getAccessor().getObject(i);
        builder.append(o);
        System.out.println(o);
      }
      loader.clear();
      b.release();
    }

    return builder.toString();
  }
