Package org.apache.hadoop.hive.serde2.objectinspector

Examples of org.apache.hadoop.hive.serde2.objectinspector.StructField
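A StructField is a handle to a single field of a struct, obtained from a
StructObjectInspector. It exposes the field's name, its comment, and the
ObjectInspector for the field's value, and it is the token passed back to
getStructFieldData() to extract that field from a row object.

As orientation before the collected fragments below, here is a minimal,
self-contained sketch of the API. It assumes only that Hive's serde2 classes
are on the classpath; the class name StructFieldDemo and the sample row are
illustrative, not taken from any of the fragments:

    import java.util.Arrays;
    import java.util.List;

    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
    import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.StructField;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class StructFieldDemo {
      public static void main(String[] args) {
        // Build an inspector for struct<name:string,age:int> from standard
        // (plain Java) primitive inspectors.
        List<String> fieldNames = Arrays.asList("name", "age");
        List<ObjectInspector> fieldOIs = Arrays.asList(
            (ObjectInspector) PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.javaIntObjectInspector);
        StandardStructObjectInspector soi =
            ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);

        // A standard struct object is just a List of field values.
        List<Object> row = Arrays.<Object>asList("alice", 30);

        // Resolve a field handle by name, then use it to pull the value out.
        StructField ageField = soi.getStructFieldRef("age");
        Object ageData = soi.getStructFieldData(row, ageField);
        System.out.println(ageField.getFieldName() + ":"
            + ageField.getFieldObjectInspector().getTypeName() + " = " + ageData);
      }
    }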

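The first fragment resolves a dotted field path against an ObjectInspector,
descending through struct fields and through list elements and map keys/values
via the pseudo-names $elem$, $key$, and $value$, then emits FieldSchema entries
for the resolved field (or for every field, if the path resolved to a struct):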

    // names holds the components of a dotted field path; oi is the enclosing
    // ObjectInspector and is narrowed one path component at a time.
    String last_name = names[names.length - 1];
    for (int i = 1; i < names.length; i++) {

      if (oi instanceof StructObjectInspector) {
        StructObjectInspector soi = (StructObjectInspector) oi;
        StructField sf = soi.getStructFieldRef(names[i]);
        if (sf == null) {
          throw new MetaException("Invalid Field " + names[i]);
        } else {
          oi = sf.getFieldObjectInspector();
        }
      } else if (oi instanceof ListObjectInspector
          && names[i].equalsIgnoreCase("$elem$")) {
        ListObjectInspector loi = (ListObjectInspector) oi;
        oi = loi.getListElementObjectInspector();
      } else if (oi instanceof MapObjectInspector
          && names[i].equalsIgnoreCase("$key$")) {
        MapObjectInspector moi = (MapObjectInspector) oi;
        oi = moi.getMapKeyObjectInspector();
      } else if (oi instanceof MapObjectInspector
          && names[i].equalsIgnoreCase("$value$")) {
        MapObjectInspector moi = (MapObjectInspector) oi;
        oi = moi.getMapValueObjectInspector();
      } else {
        throw new MetaException("Unknown type for " + names[i]);
      }
    }

    ArrayList<FieldSchema> str_fields = new ArrayList<FieldSchema>();
    // rules on how to recurse the ObjectInspector based on its type
    if (oi.getCategory() != Category.STRUCT) {
      str_fields.add(new FieldSchema(last_name, oi.getTypeName(),
          FROM_SERIALIZER));
    } else {
      List<? extends StructField> fields = ((StructObjectInspector) oi)
          .getAllStructFieldRefs();
      for (int i = 0; i < fields.size(); i++) {
        StructField structField = fields.get(i);
        String fieldName = structField.getFieldName();
        String fieldTypeName = structField.getFieldObjectInspector().getTypeName();
        String fieldComment = determineFieldComment(structField.getFieldComment());

        str_fields.add(new FieldSchema(fieldName, fieldTypeName, fieldComment));
      }
    }
    return str_fields;


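Initializing per-column state: for each non-partition column, the row
inspector's StructField reference and its field ObjectInspector are cached in
parallel arrays (partition keys are not part of the row data):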
            names[i] = column.getName();
            types[i] = column.getType();
            hiveTypes[i] = column.getHiveType();

            if (!column.isPartitionKey()) {
                StructField field = rowInspector.getStructFieldRef(column.getName());
                structFields[i] = field;
                fieldInspectors[i] = field.getFieldObjectInspector();
            }

            isPartitionColumn[i] = column.isPartitionKey();
        }

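Extracting selected fields from a deserialized row: the StructField pulls out
the raw field data, collection-typed values are converted to standard Java
objects, and primitives take a separate parsing path: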
      throw new IOException(e);
    }

    for (int i = 0; i < parserData.columnIndexes.length; ++i) {
      int columnIndex = parserData.columnIndexes[i];
      StructField structField = parserData.structFields[columnIndex];

      Object fieldData = parserData.inspector.getStructFieldData(rowData, structField);
      if (fieldData == null) {
        arrayRecord.setNull(columnIndex, true);
        continue;
      }

      if (arrayRecord.getHiveType(columnIndex).isCollection()) {
        ObjectInspector fieldInspector = structField.getFieldObjectInspector();
        Object parsed = ObjectInspectorUtils.copyToStandardJavaObject(fieldData, fieldInspector);
        arrayRecord.setObject(columnIndex, parsed);
      } else {
        parsePrimitive(arrayRecord, columnIndex, fieldData);
      }

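A positional record accessor: the StructField is looked up by index from
getAllStructFieldRefs(), and the field's data and inspector are handed to a
serializer on demand: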
  private StructObjectInspector soi; // inspector for the wrapped struct object "o"
 
  @Override
  public Object get(int fieldNum) {
    try {
      StructField fref = soi.getAllStructFieldRefs().get(fieldNum);
      return HCatRecordSerDe.serializeField(
          soi.getStructFieldData(o, fref),
          fref.getFieldObjectInspector());
    } catch (SerDeException e) {
      throw new IllegalStateException("SerDe Exception deserializing",e);
    }
  }

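Detecting a union tag in the reduce key: when a group-by has distinct
aggregates, the last sub-field of the KEY struct is a UNION; this code walks
the key's StructFields and, if it finds such a field, builds an expression
evaluator for it: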
    // reduce KEY has union field as the last field if there are distinct
    // aggregates in group-by.
    List<? extends StructField> sfs =
      ((StandardStructObjectInspector) rowInspector).getAllStructFieldRefs();
    if (sfs.size() > 0) {
      StructField keyField = sfs.get(0);
      if (keyField.getFieldName().equalsIgnoreCase(
          Utilities.ReduceField.KEY.name())) {
        ObjectInspector keyObjInspector = keyField.getFieldObjectInspector();
        if (keyObjInspector instanceof StandardStructObjectInspector) {
          List<? extends StructField> keysfs =
            ((StandardStructObjectInspector) keyObjInspector).getAllStructFieldRefs();
          if (keysfs.size() > 0) {
            // the last field is the union field, if any
            StructField sf = keysfs.get(keysfs.size() - 1);
            if (sf.getFieldObjectInspector().getCategory().equals(
                ObjectInspector.Category.UNION)) {
              unionExprEval = ExprNodeEvaluatorFactory.get(
                new ExprNodeColumnDesc(TypeInfoUtils.getTypeInfoFromObjectInspector(
                sf.getFieldObjectInspector()),
                keyField.getFieldName() + "." + sf.getFieldName(), null,
                false));
              unionExprEval.initialize(rowInspector);
            }
          }
        }

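Inside a join operator: the KEY StructField is resolved from the input
inspector so the operator can log oversized join inputs and, via the field's
ObjectInspector, detect all-null join keys that must each form their own group: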
      }

      // number of rows for the key in the given table
      int sz = storage.get(alias).size();
      StructObjectInspector soi = (StructObjectInspector) inputObjInspectors[tag];
      StructField sf = soi.getStructFieldRef(Utilities.ReduceField.KEY
          .toString());
      Object keyObject = soi.getStructFieldData(row, sf);

      // Are we consuming too much memory?
      if (alias == numAliases - 1 && !(handleSkewJoin && skewJoinKeyContext.currBigKeyTag >= 0)) {
        if (sz == joinEmitInterval) {
          // The input is sorted by alias, so if we are already in the last
          // join operand, we can emit some results now. Note this has to be
          // done before adding the current row to the storage, to preserve
          // correctness for outer joins.
          checkAndGenObject();
          storage.get(alias).clear();
        }
      } else {
        if (sz == nextSz) {
          // Output a warning if we reached at least 1000 rows for a join
          // operand. We won't output a warning for the last join operand,
          // since its size will never reach joinEmitInterval.
          LOG.warn("table " + alias + " has " + sz + " rows for join key "
              + keyObject);
          nextSz = getNextSize(nextSz);
        }
      }

      // Add the value to the vector
      storage.get(alias).add(nr);
      // if join-key is null, process each row in different group.
      if (SerDeUtils.hasAnyNullObject(keyObject, sf.getFieldObjectInspector())) {
        endGroup();
        startGroup();
      }
    } catch (Exception e) {
      e.printStackTrace();

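A test fragment: StructField references for columns "a" and "b" are combined
with their PrimitiveObjectInspectors to check the values produced by a script
operator: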
        cdop.retrieve(io);
        System.out.println("[" + i + "] io.o=" + io.o);
        System.out.println("[" + i + "] io.oi=" + io.oi);
        StructObjectInspector soi = (StructObjectInspector) io.oi;
        assert (soi != null);
        StructField a = soi.getStructFieldRef("a");
        StructField b = soi.getStructFieldRef("b");
        assertEquals("" + (i + 1), ((PrimitiveObjectInspector) a
            .getFieldObjectInspector()).getPrimitiveJavaObject(soi
            .getStructFieldData(io.o, a)));
        assertEquals((i) + "1", ((PrimitiveObjectInspector) b
            .getFieldObjectInspector()).getPrimitiveJavaObject(soi
            .getStructFieldData(io.o, b)));
      }

      System.out.println("Script Operator ok");

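Partition setup for windowing: each partition column's StructField is resolved
from both the native and the standard ObjectInspector and cached for later
lookups: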
    stdObjFields = new ArrayList<StructField>();
    ArrayList<ColumnDef> cols = tabDef.getWindow().getPartDef().getColumns();
    for(ColumnDef colDef : cols)
    {
      String colName = colDef.getAlias();
      StructField f = OI.getStructFieldRef(colName);
      StructField stdF = stdOI.getStructFieldRef(colName);
      partColumns.add(colName);
      objFields.add(f);
      stdObjFields.add(stdF);
    }
  }

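Serializing sort columns: each column's StructField supplies both the value
(via getStructFieldData) and the ObjectInspector needed to serialize it, while
a hash code is accumulated over the partition columns (the fragment is
truncated mid-expression):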
      if (i == numPCols)
      {
        grpSize = outputByteBuffer.getLength();
      }
      String sCol = sortCols[i];
      StructField iField = inputOI.getStructFieldRef(sCol);
      Object val = inputOI.getStructFieldData(obj, iField);
      ObjectInspector fieldOI = iField.getFieldObjectInspector();
      // wkey.setElement(val, iField.getFieldObjectInspector(), si++);
      serialize(outputByteBuffer, val, fieldOI, ordering[i]);
      if ( i < numPCols)
      {
        hashCode = hashCode * 31
 

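Finally, a getTypeName() implementation that renders Hive's
struct<name:type,...> syntax from a list of StructFields: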
    @Override
    public String getTypeName() {
      StringBuilder buffer = new StringBuilder();
      buffer.append("struct<");
      for(int i=0; i < fields.size(); ++i) {
        StructField field = fields.get(i);
        if (i != 0) {
          buffer.append(",");
        }
        buffer.append(field.getFieldName());
        buffer.append(":");
        buffer.append(field.getFieldObjectInspector().getTypeName());
      }
      buffer.append(">");
      return buffer.toString();
    }
