Package org.apache.hadoop.hive.serde2

Examples of org.apache.hadoop.hive.serde2.SerDeException
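
SerDeException is the checked exception that Hive SerDe implementations throw when a table definition, an input record, or an underlying I/O or Thrift call cannot be handled. The snippets on this page show two recurring patterns: constructing the exception with a descriptive message after a validation failure, and wrapping a lower-level exception. As a reference point, here is a minimal, self-contained sketch of both patterns; the class name, the helper methods, and the 'columns' property check are illustrative, not taken from the snippets below.

import java.io.IOException;
import java.util.Properties;

import org.apache.hadoop.hive.serde2.SerDeException;

public class SerDeExceptionSketch {

  // Validation-style usage: reject a missing table property with a descriptive message.
  static void checkColumns(Properties tbl) throws SerDeException {
    String columns = tbl.getProperty("columns");
    if (columns == null || columns.isEmpty()) {
      throw new SerDeException(SerDeExceptionSketch.class.getName()
          + ": table property 'columns' must be set");
    }
  }

  // Wrapping-style usage: rethrow a lower-level exception as a SerDeException.
  static void readRow() throws SerDeException {
    try {
      throw new IOException("simulated read failure");
    } catch (IOException e) {
      throw new SerDeException(e);
    }
  }

  public static void main(String[] args) {
    try {
      checkColumns(new Properties());
    } catch (SerDeException e) {
      System.out.println("caught: " + e.getMessage());
    }
    try {
      readRow();
    } catch (SerDeException e) {
      System.out.println("caught wrapped: " + e.getCause());
    }
  }
}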


                break;
            default:
                // The only type expected here is STRUCT, which maps to HCatRecord;
                // anything else is an error, so fail with a SerDeException instead of
                // returning a null inspector.
                throw new SerDeException("TypeInfo [" + typeInfo.getTypeName()
                    + "] was not of struct type - HCatRecord expected struct type, got ["
                    + typeInfo.getCategory().toString() + "]");
            }
            cachedHCatRecordObjectInspectors.put(typeInfo, oi);
        }


   
    serdeParams = LazySimpleSerDe.initSerdeParams(
      job, tbl, serdeName);
   
    if (hbaseColumnNames.size() != serdeParams.getColumnNames().size()) {
      throw new SerDeException(serdeName + ": columns has " +
        serdeParams.getColumnNames().size() +
        " elements while hbase.columns.mapping has " +
        hbaseColumnNames.size() + " elements" +
        " (counting the key if implicit)");
    }
   
    // Check that the mapping schema is right;
    // all we can verify is that "columnfamily:" is mapped to MAP<String,?>.
    for (int i = 0; i < hbaseColumnNames.size(); i++) {
      String hbaseColName = hbaseColumnNames.get(i);
      if (hbaseColName.endsWith(":")) {
        TypeInfo typeInfo = serdeParams.getColumnTypes().get(i);
        if ((typeInfo.getCategory() != Category.MAP) ||
          !Constants.STRING_TYPE_NAME.equals(
            ((MapTypeInfo) typeInfo).getMapKeyTypeInfo().getTypeName())) {

          throw new SerDeException(
            serdeName + ": hbase column family '"
            + hbaseColName
            + "' should be mapped to map<string,?> but is mapped to "
            + typeInfo.getTypeName());
        }
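The check above requires that an HBase column family mapped with a trailing ':' be declared as a Hive map with string keys. Below is a minimal, self-contained restatement of that rule; the class and method names are illustrative, and TypeInfoUtils.getTypeInfoFromTypeString is used only to build sample TypeInfo values.

import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class ColumnFamilyMappingCheck {

  // A whole-family mapping ("cf:") must be declared as map<string,?> in the Hive schema.
  static void check(String hbaseColName, TypeInfo typeInfo) throws SerDeException {
    if (hbaseColName.endsWith(":")
        && (typeInfo.getCategory() != Category.MAP
            || !"string".equals(((MapTypeInfo) typeInfo).getMapKeyTypeInfo().getTypeName()))) {
      throw new SerDeException("hbase column family '" + hbaseColName
          + "' should be mapped to map<string,?> but is mapped to " + typeInfo.getTypeName());
    }
  }

  public static void main(String[] args) throws SerDeException {
    check("cf:", TypeInfoUtils.getTypeInfoFromTypeString("map<string,int>")); // passes
    check("cf:", TypeInfoUtils.getTypeInfoFromTypeString("map<int,string>")); // throws SerDeException
  }
}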

   * @see SerDe#deserialize(Writable)
   */
  public Object deserialize(Writable rowResult) throws SerDeException {
   
    if (!(rowResult instanceof RowResult)) {
      throw new SerDeException(getClass().getName() + ": expects RowResult!");
    }
   
    RowResult rr = (RowResult)rowResult;
    cachedHBaseRow.init(rr, hbaseColumnNames);
    return cachedHBaseRow;

  @Override
  public Writable serialize(Object obj, ObjectInspector objInspector)
      throws SerDeException {
    if (objInspector.getCategory() != Category.STRUCT) {
      throw new SerDeException(getClass().toString()
          + " can only serialize struct types, but we got: "
          + objInspector.getTypeName());
    }

    // Prepare the field ObjectInspectors
    StructObjectInspector soi = (StructObjectInspector) objInspector;
    List<? extends StructField> fields = soi.getAllStructFieldRefs();
    List<Object> list = soi.getStructFieldsDataAsList(obj);
    List<? extends StructField> declaredFields =
      (serdeParams.getRowTypeInfo() != null &&
        ((StructTypeInfo) serdeParams.getRowTypeInfo())
        .getAllStructFieldNames().size() > 0) ?
      ((StructObjectInspector)getObjectInspector()).getAllStructFieldRefs()
      : null;
       
    BatchUpdate batchUpdate;

    try {
      byte [] key =
        serializeField(
          iKey, HBASE_KEY_COL, null, fields, list, declaredFields);
      if (key == null) {
        throw new SerDeException("HBase row key cannot be NULL");
      }
      batchUpdate = new BatchUpdate(key);
      // Serialize each field
      for (int i = 0; i < fields.size(); i++) {
        if (i == iKey) {
          // already processed the key above
          continue;
        }
        String hbaseColumn = hbaseColumnNames.get(i);
        serializeField(
          i, hbaseColumn, batchUpdate, fields, list, declaredFields);
      }
    } catch (IOException e) {
      throw new SerDeException(e);
    }
   
    return batchUpdate;
  }

          recordType = null;
        }
      }
    } catch (IOException e) {
      LOG.error("Got exception while trying to instantiate dataset {}", datasetName, e);
      throw new SerDeException(e);
    }
  }

    // The object and the objectInspector represent one row of a query result to write into a dataset.
    // Therefore, it is not guaranteed that the object exactly matches the schema of the dataset
    // we want to write into.

    if (!(objectInspector instanceof StructObjectInspector)) {
      throw new SerDeException("Trying to serialize with unknown object inspector type " +
                                 objectInspector.getClass().getName() + ". Expected StructObjectInspector.");
    }

    // Overwrite field names (as they get lost by Hive).
    StructTypeInfo structTypeInfo = (StructTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(objectInspector);
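The last line above rebuilds the struct's type description from the object inspector via TypeInfoUtils before reconciling it with the dataset's schema. Here is a minimal, self-contained sketch of that conversion; the field names and the wrapper class are illustrative.

import java.util.Arrays;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class StructTypeInfoFromInspector {
  public static void main(String[] args) {
    // Build a struct inspector for (name: string, age: int), purely for illustration.
    StructObjectInspector soi = ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList("name", "age"),
        Arrays.<ObjectInspector>asList(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector,
            PrimitiveObjectInspectorFactory.javaIntObjectInspector));

    // Recover the struct's type description from the inspector, as the snippet above does.
    StructTypeInfo sti = (StructTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(soi);
    System.out.println(sti.getAllStructFieldNames());      // [name, age]
    System.out.println(sti.getAllStructFieldTypeInfos());  // [string, int]
  }
}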

    outProtocol = outFactory.getProtocol(outTransport);
    inProtocol = inFactory.getProtocol(inTransport);
  }

  public void initialize(Configuration job, Properties tbl) throws SerDeException {
    throw new SerDeException("ThriftByteStreamTypedSerDe is still semi-abstract");
  }

                                    TProtocolFactory outFactory) throws SerDeException {
    super(objectType);
    try {
      init(inFactory, outFactory);
    } catch (Exception e) {
      throw new SerDeException(e);
    }
  }

  public Object deserialize(Writable field) throws SerDeException {
    Object obj = super.deserialize(field);
    try {
      ((TBase)obj).read(inProtocol);
    } catch (Exception e) {
      throw new SerDeException(e);
    }
    return obj;
  }

      TProtocolFactory tp = TReflectionUtils.getProtocolFactoryByName(protoName);
      tsd = new ThriftByteStreamTypedSerDe(recordClass, tp, tp);
     
    } catch (Exception e) {
      throw new SerDeException(e);
    }
  }
