Package org.apache.hadoop.hive.serde2.objectinspector

Examples of org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector


    /**
     * Sets data model properties from the source object.
     * @param dataModel the data model object
     * @param source the source object
     */
    public void set(Object dataModel, Object source) {
        StructObjectInspector inspector = this.sourceInspector;
        StructField[] sources = this.sourceFields;
        PropertyDescriptor[] targets = this.targetProperties;
        ValueDriver[] drivers = propertyDrivers;
        for (int i = 0; i < sources.length; i++) {
            Object value = inspector.getStructFieldData(source, sources[i]);
            ValueOption<?> option = targets[i].extract(dataModel);
            drivers[i].set(option, value);
        }
    }
View Full Code Here
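
The snippet above pulls each projected field out of a source row with getStructFieldData and pushes it into a data model property. A minimal, self-contained sketch of the same pattern (the struct shape, field names, and row values here are invented for illustration):

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class StructFieldCopy {
    public static void main(String[] args) {
        // Build an inspector for a struct<name:string, age:int>.
        StructObjectInspector inspector = ObjectInspectorFactory.getStandardStructObjectInspector(
                Arrays.asList("name", "age"),
                Arrays.<ObjectInspector>asList(
                        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
                        PrimitiveObjectInspectorFactory.javaIntObjectInspector));
        // A standard struct row is just a List (or Object[]) of field values.
        List<Object> row = Arrays.<Object>asList("alice", 30);
        for (StructField field : inspector.getAllStructFieldRefs()) {
            Object value = inspector.getStructFieldData(row, field);
            System.out.println(field.getFieldName() + " = " + value);
        }
    }
}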


                        "Loading ORCFile metadata ({0}): {1}",
                        descriptor.getDataModelClass().getSimpleName(),
                        path));
            }
            Reader orc = OrcFile.createReader(fileSystem, path);
            StructObjectInspector sourceInspector = (StructObjectInspector) orc.getObjectInspector();
            driver = new DataModelDriver(descriptor, sourceInspector, configuration);
            if (LOG.isInfoEnabled()) {
                LOG.info(MessageFormat.format(
                        "Loading ORCFile contents ({0}): path={1}, range={2}+{3}",
                        descriptor.getDataModelClass().getSimpleName(),
View Full Code Here
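
For context, a hedged sketch of how a row-level StructObjectInspector comes out of an ORC file with the same older two-argument OrcFile.createReader API the snippet uses (the surrounding class and method names are assumptions):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.orc.OrcFile;
import org.apache.hadoop.hive.ql.io.orc.Reader;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

public class OrcInspectorProbe {
    // Opens an ORC file and returns its row-level struct inspector,
    // using the same (older) two-argument createReader the snippet calls.
    static StructObjectInspector openOrcInspector(Path path, Configuration conf) throws IOException {
        FileSystem fs = path.getFileSystem(conf);
        Reader orc = OrcFile.createReader(fs, path);
        return (StructObjectInspector) orc.getObjectInspector();
    }
}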

        return reader;
    }

    private boolean[] computeAvailableColumns(Reader orc) {
        assert driver != null;
        StructObjectInspector inspector = driver.getSourceInspector();
        List<? extends StructField> all = inspector.getAllStructFieldRefs();
        Set<StructField> projected = new HashSet<StructField>(driver.getSourceFields());
        // the first type must be a root type
        boolean[] availables = new boolean[all.size() + 1];
        availables[0] = true;
        for (int i = 0, n = all.size(); i < n; i++) {
View Full Code Here
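
The method builds a projection mask over the top-level columns, reserving index 0 for the root type. A standalone sketch of the same computation, keyed by field name rather than by StructField identity as the original is:

import java.util.List;
import java.util.Set;

import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

public class ProjectionMask {
    // Slot 0 stands for the root struct itself, as in the snippet;
    // slot i + 1 says whether top-level column i is projected.
    static boolean[] computeAvailableColumns(StructObjectInspector inspector, Set<String> projectedNames) {
        List<? extends StructField> all = inspector.getAllStructFieldRefs();
        boolean[] available = new boolean[all.size() + 1];
        available[0] = true;
        for (int i = 0, n = all.size(); i < n; i++) {
            available[i + 1] = projectedNames.contains(all.get(i).getFieldName());
        }
        return available;
    }
}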

    if (dpColNames != null && dpColNames.size() > 0) {
      this.bDynParts = true;
      assert inputObjInspectors.length == 1: "FileSinkOperator should have 1 parent, but it has "
        + inputObjInspectors.length;
      StructObjectInspector soi = (StructObjectInspector) inputObjInspectors[0];
      // remove the last dpMapping.size() columns from the OI
      List<? extends StructField> fieldOI = soi.getAllStructFieldRefs();
      ArrayList<ObjectInspector> newFieldsOI = new ArrayList<ObjectInspector>();
      ArrayList<String> newFieldsName = new ArrayList<String>();
      this.dpStartCol = 0;
      for (StructField sf: fieldOI) {
        String fn = sf.getFieldName();
View Full Code Here
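
The loop removes the trailing dynamic-partition columns from the object inspector. A sketch of that pruning as a standalone helper (the helper name and the Set-based lookup are assumptions):

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

public class DropPartitionColumns {
    // Rebuilds the struct inspector without the dynamic-partition columns.
    static StructObjectInspector prune(StructObjectInspector soi, Set<String> dpColNames) {
        List<String> newFieldsName = new ArrayList<String>();
        List<ObjectInspector> newFieldsOI = new ArrayList<ObjectInspector>();
        for (StructField sf : soi.getAllStructFieldRefs()) {
            if (!dpColNames.contains(sf.getFieldName())) {
                newFieldsName.add(sf.getFieldName());
                newFieldsOI.add(sf.getFieldObjectInspector());
            }
        }
        return ObjectInspectorFactory.getStandardStructObjectInspector(newFieldsName, newFieldsOI);
    }
}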

    String partName = String.valueOf(partSpec);
    // HiveConf.setVar(hconf, HiveConf.ConfVars.HIVETABLENAME, tableName);
    // HiveConf.setVar(hconf, HiveConf.ConfVars.HIVEPARTITIONNAME, partName);
    Deserializer deserializer = (Deserializer) sdclass.newInstance();
    deserializer.initialize(hconf, tblProps);
    StructObjectInspector rawRowObjectInspector = (StructObjectInspector) deserializer
        .getObjectInspector();

    MapOpCtx opCtx = null;
    // Next check if this table has partitions and if so
    // get the list of partition names as well as allocate
    // the serdes for the partition columns
    String pcols = tblProps
        .getProperty(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_PARTITION_COLUMNS);
    // Log LOG = LogFactory.getLog(MapOperator.class.getName());
    if (pcols != null && pcols.length() > 0) {
      String[] partKeys = pcols.trim().split("/");
      List<String> partNames = new ArrayList<String>(partKeys.length);
      Object[] partValues = new Object[partKeys.length];
      List<ObjectInspector> partObjectInspectors = new ArrayList<ObjectInspector>(
          partKeys.length);
      for (int i = 0; i < partKeys.length; i++) {
        String key = partKeys[i];
        partNames.add(key);
        // Partitions do not exist for this table
        if (partSpec == null) {
          partValues[i] = new Text();
        } else {
          partValues[i] = new Text(partSpec.get(key));
        }
        partObjectInspectors
            .add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
      }
      StructObjectInspector partObjectInspector = ObjectInspectorFactory
          .getStandardStructObjectInspector(partNames, partObjectInspectors);

      Object[] rowWithPart = new Object[2];
      rowWithPart[1] = partValues;
      StructObjectInspector rowObjectInspector = ObjectInspectorFactory
          .getUnionStructObjectInspector(Arrays
          .asList(new StructObjectInspector[] {rawRowObjectInspector, partObjectInspector}));
      // LOG.info("dump " + tableName + " " + partName + " " +
      // rowObjectInspector.getTypeName());
      opCtx = new MapOpCtx(true, rowObjectInspector, rawRowObjectInspector, partObjectInspector, rowWithPart, deserializer);
View Full Code Here
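
The key call above is getUnionStructObjectInspector: the runtime row becomes a two-slot array whose first slot is the deserialized row and whose second slot holds the partition values. A runnable sketch with invented field names and values:

import java.util.Arrays;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

public class RowWithPartDemo {
    public static void main(String[] args) {
        StructObjectInspector raw = ObjectInspectorFactory.getStandardStructObjectInspector(
                Arrays.asList("value"),
                Arrays.<ObjectInspector>asList(PrimitiveObjectInspectorFactory.javaStringObjectInspector));
        StructObjectInspector part = ObjectInspectorFactory.getStandardStructObjectInspector(
                Arrays.asList("ds"),
                Arrays.<ObjectInspector>asList(PrimitiveObjectInspectorFactory.writableStringObjectInspector));
        // The union inspector flattens both structs into one row type.
        StructObjectInspector combined = ObjectInspectorFactory.getUnionStructObjectInspector(
                Arrays.asList(raw, part));
        // Mirrors rowWithPart above: slot 0 = deserialized row, slot 1 = partition values.
        Object[] rowWithPart = new Object[] {
                Arrays.<Object>asList("some value"),
                new Object[] { new Text("2018-01-01") } };
        StructField ds = combined.getStructFieldRef("ds");
        System.out.println(combined.getStructFieldData(rowWithPart, ds)); // prints 2018-01-01
    }
}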

            if (!done) {
              deserializer = opCtxMap.get(inp).getDeserializer();
              isPartitioned = opCtxMap.get(inp).isPartitioned();
              rowWithPart = opCtxMap.get(inp).getRowWithPart();
              rowObjectInspector = opCtxMap.get(inp).getRowObjectInspector();
              StructObjectInspector rawRowObjectInspector = opCtxMap.get(inp).rawRowObjectInspector;
              StructObjectInspector partObjectInspector = opCtxMap.get(inp).partObjectInspector;
              if (op instanceof TableScanOperator) {
                TableScanOperator tsOp = (TableScanOperator) op;
                TableScanDesc tsDesc = tsOp.getConf();
                if (tsDesc != null) {
                  this.vcs = tsDesc.getVirtualCols();
                  if (vcs != null && vcs.size() > 0) {
                    this.hasVC = true;
                    List<String> vcNames = new ArrayList<String>(vcs.size());
                    this.vcValues = new Writable[vcs.size()];
                    List<ObjectInspector> vcsObjectInspectors = new ArrayList<ObjectInspector>(vcs.size());
                    for (int i = 0; i < vcs.size(); i++) {
                      VirtualColumn vc = vcs.get(i);
                      vcsObjectInspectors.add(
                          PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
                              ((PrimitiveTypeInfo) vc.getTypeInfo()).getPrimitiveCategory()));
                      vcNames.add(vc.getName());
                    }
                    StructObjectInspector vcStructObjectInspector = ObjectInspectorFactory
                        .getStandardStructObjectInspector(vcNames,
                            vcsObjectInspectors);
                    if (isPartitioned) {
                      this.rowWithPartAndVC = new Object[3];
                      this.rowWithPartAndVC[1] = this.rowWithPart[1];
View Full Code Here
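
The virtual-column inspector above is assembled from each column's primitive category. A compact sketch using two of Hive's real virtual columns (the standalone class is an assumption):

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class VirtualColumnInspector {
    public static void main(String[] args) {
        // Two of Hive's virtual columns and their primitive categories.
        List<String> vcNames = Arrays.asList("INPUT__FILE__NAME", "BLOCK__OFFSET__INSIDE__FILE");
        List<ObjectInspector> vcInspectors = Arrays.<ObjectInspector>asList(
                PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveCategory.STRING),
                PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(PrimitiveCategory.LONG));
        StructObjectInspector vcStructOI =
                ObjectInspectorFactory.getStandardStructObjectInspector(vcNames, vcInspectors);
        System.out.println(vcStructOI.getTypeName()); // struct<input__file__name:string,...>
    }
}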

    String[] names = tableName.split("\\.");
    String last_name = names[names.length - 1];
    for (int i = 1; i < names.length; i++) {

      if (oi instanceof StructObjectInspector) {
        StructObjectInspector soi = (StructObjectInspector) oi;
        StructField sf = soi.getStructFieldRef(names[i]);
        if (sf == null) {
          throw new MetaException("Invalid Field " + names[i]);
        } else {
          oi = sf.getFieldObjectInspector();
        }
View Full Code Here
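
The loop descends a dotted name through nested structs, one getStructFieldRef per path segment. The same walk as a self-contained helper (the helper name is an assumption; some inspectors throw for unknown fields rather than returning null, so the null check is belt-and-braces):

import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

public class FieldPathResolver {
    // Walks "table.outer.inner" down through nested struct inspectors;
    // names[0] is the table name, so the descent starts at index 1.
    static ObjectInspector resolve(StructObjectInspector root, String dottedName) throws MetaException {
        String[] names = dottedName.split("\\.");
        ObjectInspector oi = root;
        for (int i = 1; i < names.length; i++) {
            if (!(oi instanceof StructObjectInspector)) {
                throw new MetaException("Invalid Field " + names[i]);
            }
            StructField sf = ((StructObjectInspector) oi).getStructFieldRef(names[i]);
            if (sf == null) {
                throw new MetaException("Invalid Field " + names[i]);
            }
            oi = sf.getFieldObjectInspector();
        }
        return oi;
    }
}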

          "input object inspector is not struct";

        writable = new ArrayList<Object>(conf.getPartColumns().size());
        values = new ArrayList<String>(conf.getPartColumns().size());
        dpStartCol = 0;
        StructObjectInspector soi = (StructObjectInspector) inputObjInspectors[0];
        for (StructField sf: soi.getAllStructFieldRefs()) {
          String fn = sf.getFieldName();
          if (!conf.getPartColumns().contains(fn)) {
            dpStartCol++;
          } else {
            break;
View Full Code Here
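
The loop above only counts how many fields precede the first dynamic-partition column. The same computation in isolation, under the snippet's assumption that partition columns sit at the end of the row:

import java.util.Set;

import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

public class DpStartColumn {
    // Counts the leading non-partition fields; dynamic-partition
    // columns are assumed to be trailing, as in the snippet.
    static int findDpStartCol(StructObjectInspector soi, Set<String> partColumns) {
        int dpStartCol = 0;
        for (StructField sf : soi.getAllStructFieldRefs()) {
            if (partColumns.contains(sf.getFieldName())) {
                break;
            }
            dpStartCol++;
        }
        return dpStartCol;
    }
}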

    LinkedHashSet<Partition> true_parts = new LinkedHashSet<Partition>();
    LinkedHashSet<Partition> unkn_parts = new LinkedHashSet<Partition>();
    LinkedHashSet<Partition> denied_parts = new LinkedHashSet<Partition>();

    try {
      StructObjectInspector rowObjectInspector = (StructObjectInspector) tab
          .getDeserializer().getObjectInspector();
      Object[] rowWithPart = new Object[2];

      if (tab.isPartitioned()) {
        LOG.debug("tabname = " + tab.getTableName() + " is partitioned");
View Full Code Here
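
As above, the row inspector comes straight from the table's deserializer, and the two-slot rowWithPart array is then reused while evaluating the partition predicate. A defensive variant of that cast, checking the inspector's category first (the helper and its error message are assumptions):

import org.apache.hadoop.hive.serde2.Deserializer;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

public class RowInspectorCheck {
    // Checks the category before casting instead of relying on a ClassCastException.
    static StructObjectInspector rowInspector(Deserializer deserializer) throws SerDeException {
        ObjectInspector oi = deserializer.getObjectInspector();
        if (oi.getCategory() != Category.STRUCT) {
            throw new SerDeException("expected a struct row inspector, got " + oi.getTypeName());
        }
        return (StructObjectInspector) oi;
    }
}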

          getTypeInfoFromObjectInspector(moi.getMapKeyObjectInspector()),
          getTypeInfoFromObjectInspector(moi.getMapValueObjectInspector()));
      break;
    }
    case STRUCT: {
      StructObjectInspector soi = (StructObjectInspector) oi;
      List<? extends StructField> fields = soi.getAllStructFieldRefs();
      List<String> fieldNames = new ArrayList<String>(fields.size());
      List<TypeInfo> fieldTypeInfos = new ArrayList<TypeInfo>(fields.size());
      for (StructField f : fields) {
        fieldNames.add(f.getFieldName());
        fieldTypeInfos.add(getTypeInfoFromObjectInspector(f
View Full Code Here
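
The STRUCT case mirrors the conversion Hive ships in TypeInfoUtils; a sketch showing both the manual field-by-field version and the equivalent one-line utility call:

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class StructTypeInfoDemo {
    static TypeInfo toTypeInfo(StructObjectInspector soi) {
        // Manual conversion, field by field, as in the snippet:
        List<? extends StructField> fields = soi.getAllStructFieldRefs();
        List<String> fieldNames = new ArrayList<String>(fields.size());
        List<TypeInfo> fieldTypeInfos = new ArrayList<TypeInfo>(fields.size());
        for (StructField f : fields) {
            fieldNames.add(f.getFieldName());
            fieldTypeInfos.add(TypeInfoUtils.getTypeInfoFromObjectInspector(f.getFieldObjectInspector()));
        }
        TypeInfo manual = TypeInfoFactory.getStructTypeInfo(fieldNames, fieldTypeInfos);
        // Hive ships the same conversion as a one-liner:
        TypeInfo viaUtils = TypeInfoUtils.getTypeInfoFromObjectInspector(soi);
        assert manual.equals(viaUtils);
        return viaUtils;
    }
}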
