Examples of ObjectInspector


Examples of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector

  /**
   * Tests skip behavior when an empty array appears at the end of the data.
   *
   * @throws Exception
   */
  @Test
  public void testSkipWithEmptyArrayInEnd() throws Exception {
    ObjectInspector inspector;
    List<String> emptyList = Collections.emptyList();
    synchronized (TestOrcFile.class) {
      inspector = ObjectInspectorFactory.getReflectionObjectInspector
          (StringListWithId.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
    }
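A self-contained sketch of the same pattern, using a hypothetical SimpleRecord class in place of StringListWithId: getReflectionObjectInspector builds a StructObjectInspector from the class definition, and the struct-field accessors read values straight off a plain Java object.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

public class ReflectionInspectorSketch {
  // Hypothetical record class standing in for the test classes above.
  public static class SimpleRecord {
    public int id;
    public String name;
  }

  public static void main(String[] args) {
    // Reflection-based inspector: each non-static field becomes a struct field.
    ObjectInspector inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        SimpleRecord.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);

    SimpleRecord row = new SimpleRecord();
    row.id = 1;
    row.name = "a";

    // A Java class maps to the STRUCT category, so the cast is safe here.
    StructObjectInspector soi = (StructObjectInspector) inspector;
    for (StructField field : soi.getAllStructFieldRefs()) {
      System.out.println(field.getFieldName() + " ("
          + field.getFieldObjectInspector().getTypeName() + ") = "
          + soi.getStructFieldData(row, field));
    }
  }
}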

Examples of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector

  /**
   * Tests writing a stripe with an integer column that enters low memory mode before the first
   * index stride is complete.
   */
  public void testIntEnterLowMemoryModeInFirstStride() throws Exception {
    ObjectInspector inspector;
    synchronized (TestOrcFile.class) {
      inspector = ObjectInspectorFactory.getReflectionObjectInspector
          (IntStruct.class,
              ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
    }

Examples of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector

  /**
   * Tests writing a stripe with a string column that enters low memory mode before the first
   * index stride is complete.
   */
  public void testStringEnterLowMemoryModeInFirstStride() throws Exception {
    ObjectInspector inspector;
    synchronized (TestOrcFile.class) {
      inspector = ObjectInspectorFactory.getReflectionObjectInspector
          (StringStruct.class,
              ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
    }

Examples of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector

    public void close(boolean b) throws IOException {
      // if we haven't written any rows, we need to create a file with a
      // generic schema.
      if (writer == null) {
        // a row with no columns
        ObjectInspector inspector = ObjectInspectorFactory.
            getStandardStructObjectInspector(new ArrayList<String>(),
                new ArrayList<ObjectInspector>());
        writer = OrcFile.createWriter(fs, path, this.conf, inspector,
            stripeSize, compress, compressionSize, rowIndexStride);
      }
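For comparison, a sketch (not from the original code) of building a standard struct ObjectInspector with actual columns instead of the empty schema above; column names and inspectors are matched by position.

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class StandardStructSketch {
  public static void main(String[] args) {
    List<String> names = Arrays.asList("id", "name");
    List<ObjectInspector> inspectors = Arrays.<ObjectInspector>asList(
        PrimitiveObjectInspectorFactory.javaIntObjectInspector,
        PrimitiveObjectInspectorFactory.javaStringObjectInspector);

    StructObjectInspector inspector =
        ObjectInspectorFactory.getStandardStructObjectInspector(names, inspectors);

    // Standard struct inspectors view a row as a List of field values in column order.
    List<Object> row = Arrays.asList((Object) 42, "example");
    System.out.println(inspector.getTypeName());                  // struct<id:int,name:string>
    System.out.println(inspector.getStructFieldsDataAsList(row)); // [42, example]
  }
}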

Examples of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector

    checkOutput(expectedFileUrl.getPath(), outputFilename);
  }

  @Test
  public void testDump() throws Exception {
    ObjectInspector inspector;
    synchronized (TestOrcFile.class) {
      inspector = ObjectInspectorFactory.getReflectionObjectInspector
          (MyRecord.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
    }

Examples of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector

    writer.close();
    checkOutput("orc-file-dump.out");
  }

  private void testDictionary(Configuration conf, String expectedOutputFilename) throws Exception {
    ObjectInspector inspector;
    synchronized (TestOrcFile.class) {
      inspector = ObjectInspectorFactory.getReflectionObjectInspector
          (MyRecord.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
    }
    // Turn off the use of the approximate entropy heuristic for disabling dictionary encoding

Examples of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector

  // Test that dictionary encoding is turned off if the number of distinct characters in distinct
  // strings is below the configured threshold. If dictionary encoding is turned off, the length
  // of the dictionary stream for the column will be 0 in the ORC file dump.
  @Test
  public void testEntropyThreshold() throws Exception {
    ObjectInspector inspector;
    synchronized (TestOrcFile.class) {
      inspector = ObjectInspectorFactory.getReflectionObjectInspector
          (MyRecord.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
    }
    OrcConf.setFloatVar(conf, OrcConf.ConfVars.HIVE_ORC_ENTROPY_KEY_STRING_SIZE_THRESHOLD, 1);

Examples of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector

  /**
   * Tests writing a Map containing only an entry with a null key.
   */
  @Test
  public void testIntegerEnterLowMemoryModeAndOnNotCarriedOverStripe() throws Exception {
    ObjectInspector inspector;
    synchronized (TestMapTreeWriter.class) {
      inspector = new MapStructObjectInspector();
    }
    ReaderWriterProfiler.setProfilerOptions(conf);
    Writer writer = new WriterImpl(fs, testFilePath, conf, inspector,
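As a rough sketch of how such an inspector feeds a writer, the code below uses the plain OrcFile.createWriter overload from the close() snippet further up rather than the internal WriterImpl constructor shown here; the path, stripe size, buffer size, and row index stride are made-up illustrative values.

import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.orc.CompressionKind;
import org.apache.hadoop.hive.ql.io.orc.OrcFile;
import org.apache.hadoop.hive.ql.io.orc.Writer;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class OrcWriterSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    Path path = new Path("/tmp/sketch.orc"); // hypothetical output location

    // struct<id:int,name:string> described through a standard struct inspector.
    ObjectInspector inspector = ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList("id", "name"),
        Arrays.<ObjectInspector>asList(
            PrimitiveObjectInspectorFactory.javaIntObjectInspector,
            PrimitiveObjectInspectorFactory.javaStringObjectInspector));

    // Same createWriter overload as in the close() snippet above; the numeric
    // arguments are only illustrative defaults.
    Writer writer = OrcFile.createWriter(fs, path, conf, inspector,
        256L * 1024 * 1024, CompressionKind.ZLIB, 256 * 1024, 10000);

    // Rows for a standard struct inspector are lists of field values in column order.
    writer.addRow(Arrays.asList((Object) 1, "first"));
    writer.addRow(Arrays.asList((Object) 2, "second"));
    writer.close();
  }
}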

Examples of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector

  /**
   * Gets the column FieldSchemas for a table by walking the ObjectInspector returned by
   * its deserializer.
   *
   * @throws SerDeException
   * @throws MetaException
   */
  public static List<FieldSchema> getFieldsFromDeserializer(String tableName,
      Deserializer deserializer) throws SerDeException, MetaException {
    ObjectInspector oi = deserializer.getObjectInspector();
    String[] names = tableName.split("\\.");
    String last_name = names[names.length - 1];
    for (int i = 1; i < names.length; i++) {

      if (oi instanceof StructObjectInspector) {
        StructObjectInspector soi = (StructObjectInspector) oi;
        StructField sf = soi.getStructFieldRef(names[i]);
        if (sf == null) {
          throw new MetaException("Invalid Field " + names[i]);
        } else {
          oi = sf.getFieldObjectInspector();
        }
      } else if (oi instanceof ListObjectInspector
          && names[i].equalsIgnoreCase("$elem$")) {
        ListObjectInspector loi = (ListObjectInspector) oi;
        oi = loi.getListElementObjectInspector();
      } else if (oi instanceof MapObjectInspector
          && names[i].equalsIgnoreCase("$key$")) {
        MapObjectInspector moi = (MapObjectInspector) oi;
        oi = moi.getMapKeyObjectInspector();
      } else if (oi instanceof MapObjectInspector
          && names[i].equalsIgnoreCase("$value$")) {
        MapObjectInspector moi = (MapObjectInspector) oi;
        oi = moi.getMapValueObjectInspector();
      } else {
        throw new MetaException("Unknown type for " + names[i]);
      }
    }

    ArrayList<FieldSchema> str_fields = new ArrayList<FieldSchema>();
    // rules on how to recurse the ObjectInspector based on its type
    if (oi.getCategory() != Category.STRUCT) {
      str_fields.add(new FieldSchema(last_name, oi.getTypeName(),
          "from deserializer"));
    } else {
      List<? extends StructField> fields = ((StructObjectInspector) oi)
          .getAllStructFieldRefs();
      for (int i = 0; i < fields.size(); i++) {
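The traversal above only needs the category-specific inspector interfaces. A small illustrative sketch (not from Hive itself) of the same drill-down on a standard map<string,array<int>> inspector, where "$value$" and "$elem$" correspond to the value and element inspectors:

import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class NestedInspectorSketch {
  public static void main(String[] args) {
    // map<string,array<int>> assembled from standard inspectors.
    ObjectInspector mapOi = ObjectInspectorFactory.getStandardMapObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        ObjectInspectorFactory.getStandardListObjectInspector(
            PrimitiveObjectInspectorFactory.javaIntObjectInspector));

    // "$value$" in the traversal above corresponds to getMapValueObjectInspector().
    ObjectInspector valueOi = ((MapObjectInspector) mapOi).getMapValueObjectInspector();
    // "$elem$" corresponds to getListElementObjectInspector().
    ObjectInspector elemOi = ((ListObjectInspector) valueOi).getListElementObjectInspector();

    System.out.println(mapOi.getTypeName());   // map<string,array<int>>
    System.out.println(valueOi.getTypeName()); // array<int>
    System.out.println(elemOi.getTypeName());  // int
  }
}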

Examples of org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector

  /**
   * Returns the standard object inspector that can be used to translate an
   * object of that typeInfo to a standard object type.
   */
  public static ObjectInspector getStandardWritableObjectInspectorFromTypeInfo(
      TypeInfo typeInfo) {
    ObjectInspector result = cachedStandardObjectInspector.get(typeInfo);
    if (result == null) {
      switch (typeInfo.getCategory()) {
      case PRIMITIVE: {
        result = PrimitiveObjectInspectorFactory
            .getPrimitiveWritableObjectInspector(((PrimitiveTypeInfo) typeInfo)
            .getPrimitiveCategory());
        break;
      }
      case LIST: {
        ObjectInspector elementObjectInspector =
            getStandardWritableObjectInspectorFromTypeInfo(((ListTypeInfo) typeInfo)
            .getListElementTypeInfo());
        result = ObjectInspectorFactory
            .getStandardListObjectInspector(elementObjectInspector);
        break;
      }
      case MAP: {
        MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
        ObjectInspector keyObjectInspector =
            getStandardWritableObjectInspectorFromTypeInfo(mapTypeInfo.getMapKeyTypeInfo());
        ObjectInspector valueObjectInspector =
            getStandardWritableObjectInspectorFromTypeInfo(mapTypeInfo.getMapValueTypeInfo());
        result = ObjectInspectorFactory.getStandardMapObjectInspector(
            keyObjectInspector, valueObjectInspector);
        break;
      }
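Assuming the method above is the one in TypeInfoUtils, a short sketch of how a caller typically reaches it: parse a Hive type string into a TypeInfo tree, then ask for the cached writable inspector.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeInfoToInspectorSketch {
  public static void main(String[] args) {
    // Parse a Hive type string into a TypeInfo tree, then resolve the corresponding
    // writable ObjectInspector through the cache used by the method above.
    TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString("map<string,array<int>>");
    ObjectInspector oi = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(typeInfo);

    System.out.println(oi.getCategory()); // MAP
    System.out.println(oi.getTypeName()); // map<string,array<int>>
  }
}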