Package: org.apache.hadoop.hive.serde2

Examples of org.apache.hadoop.hive.serde2.SerDeException


      if(t.getLength()==0)
        return null;
      byteArrayRef.setData(t.getBytes());
      cachedLazyBinaryStruct.init(byteArrayRef, 0, t.getLength());
    } else {
      throw new SerDeException(getClass().toString() 
          + ": expects either BytesWritable or Text object!");
    }
    return cachedLazyBinaryStruct;
  }
View Full Code Here


      for (int i=0; i<columnNames.size(); i++) {
        row.set(i, deserialize(inputByteBuffer, columnTypes.get(i),
            columnSortOrderIsDesc[i], row.get(i)));
      }
    } catch (IOException e) {
      throw new SerDeException(e);
    }
   
    return row;
  }
View Full Code Here

  @Override
  public Writable serialize(Object obj, ObjectInspector objInspector)
      throws SerDeException {
    // make sure it is a struct record
    if (objInspector.getCategory() != Category.STRUCT) {
      throw new SerDeException(getClass().toString()
          + " can only serialize struct types, but we got: "
          + objInspector.getTypeName());
    }
   
    serializeByteStream.reset();   
View Full Code Here

   * Deserialize a row from the Writable to a LazyObject.
   */
  public Object deserialize(Writable blob) throws SerDeException {

    if (!(blob instanceof BytesRefArrayWritable)) {
      throw new SerDeException(getClass().toString()
          + ": expects BytesRefArrayWritable!");
    }

    BytesRefArrayWritable cols = (BytesRefArrayWritable) blob;
    cachedLazyStruct.init(cols);
View Full Code Here

   */
  public Writable serialize(Object obj, ObjectInspector objInspector)
      throws SerDeException {
    // This SerDe can only serialize rows, i.e. struct-typed objects.
    if (objInspector.getCategory() != Category.STRUCT) {
      throw new SerDeException(getClass().toString()
          + " can only serialize struct types, but we got: "
          + objInspector.getTypeName());
    }

    // Prepare the field ObjectInspectors
    StructObjectInspector soi = (StructObjectInspector) objInspector;
    List<? extends StructField> fields = soi.getAllStructFieldRefs();
    List<Object> list = soi.getStructFieldsDataAsList(obj);
    // Fields declared by the table schema at initialization time, or null
    // when no schema (row type with named fields) was supplied. Used below
    // both for a field-count sanity check and to decide JSON vs. delimited
    // serialization per field.
    List<? extends StructField> declaredFields = (serdeParams.getRowTypeInfo() != null && ((StructTypeInfo) serdeParams
        .getRowTypeInfo()).getAllStructFieldNames().size() > 0) ? ((StructObjectInspector) getObjectInspector())
        .getAllStructFieldRefs()
        : null;

    try {
      // Reuse one output buffer across calls to avoid extra byte copies.
      serializeStream.reset();
      // Byte offset in serializeStream where the current field's bytes begin.
      int count = 0;
      // Serialize each field
      for (int i = 0; i < fields.size(); i++) {
        // Get the field objectInspector and the field object.
        ObjectInspector foi = fields.get(i).getFieldObjectInspector();
        Object f = (list == null ? null : list.get(i));

        // The incoming row may not have more fields than the declared schema.
        if (declaredFields != null && i >= declaredFields.size()) {
          throw new SerDeException("Error: expecting " + declaredFields.size()
              + " but asking for field " + i + "\n" + "data=" + obj + "\n"
              + "tableType=" + serdeParams.getRowTypeInfo().toString() + "\n"
              + "dataType="
              + TypeInfoUtils.getTypeInfoFromObjectInspector(objInspector));
        }

        // If the field that is passed in is NOT a primitive, and either the
        // field is not declared (no schema was given at initialization), or
        // the field is declared as a primitive in initialization, serialize
        // the data to JSON string. Otherwise serialize the data in the
        // delimited way.
        if (!foi.getCategory().equals(Category.PRIMITIVE)
            && (declaredFields == null || declaredFields.get(i)
                .getFieldObjectInspector().getCategory().equals(
                    Category.PRIMITIVE))) {
          LazySimpleSerDe.serialize(serializeStream,
              SerDeUtils.getJSONString(f, foi),
              PrimitiveObjectInspectorFactory.javaStringObjectInspector,
              serdeParams.getSeparators(), 1, serdeParams.getNullSequence(),
              serdeParams.isEscaped(), serdeParams.getEscapeChar(),
              serdeParams.getNeedsEscape());
        } else {
          LazySimpleSerDe.serialize(serializeStream, f, foi,
              serdeParams.getSeparators(), 1, serdeParams.getNullSequence(),
              serdeParams.isEscaped(), serdeParams.getEscapeChar(),
              serdeParams.getNeedsEscape());
        }

        // Record field i as the byte range [count, current end) of the shared
        // buffer. NOTE(review): field[i] appears to hold a reference into
        // serializeStream's internal array rather than a copy — confirm that
        // serializeCache consumers read it before the next serialize() call
        // resets the stream.
        field[i].set(serializeStream.getData(), count, serializeStream
            .getCount()
            - count);
        count = serializeStream.getCount();
      }
    } catch (IOException e) {
      // Wrap low-level I/O failures in the SerDe-level exception type,
      // preserving the cause.
      throw new SerDeException(e);
    }
    // serializeCache is a reusable Writable whose entries (field[]) were
    // updated in place above.
    return serializeCache;
  }
View Full Code Here

    serdeParams.columnTypes = TypeInfoUtils
        .getTypeInfosFromTypeString(columnTypeProperty);

    if (serdeParams.columnNames.size() != serdeParams.columnTypes.size()) {
      throw new SerDeException(serdeName + ": columns has "
          + serdeParams.columnNames.size()
          + " elements while columns.types has "
          + serdeParams.columnTypes.size() + " elements!");
    }
View Full Code Here

    } else if (field instanceof Text) {
      Text t = (Text)field;
      byteArrayRef.setData(t.getBytes());
      cachedLazyStruct.init(byteArrayRef, 0, t.getLength());
    } else {
      throw new SerDeException(getClass().toString() 
          + ": expects either BytesWritable or Text object!");
    }
    return cachedLazyStruct;
  }
View Full Code Here

   */
  public Writable serialize(Object obj, ObjectInspector objInspector)
      throws SerDeException {

    if (objInspector.getCategory() != Category.STRUCT) {
      throw new SerDeException(getClass().toString()
          + " can only serialize struct types, but we got: "
          + objInspector.getTypeName());
    }

    // Prepare the field ObjectInspectors
    StructObjectInspector soi = (StructObjectInspector)objInspector;
    List<? extends StructField> fields = soi.getAllStructFieldRefs();
    List<Object> list = soi.getStructFieldsDataAsList(obj);
    List<? extends StructField> declaredFields =(serdeParams.rowTypeInfo != null && ((StructTypeInfo) serdeParams.rowTypeInfo)
        .getAllStructFieldNames().size()>0)? ((StructObjectInspector)getObjectInspector())
        .getAllStructFieldRefs()
        : null;

    serializeStream.reset();

    try {
      // Serialize each field
      for (int i=0; i<fields.size(); i++) {
        // Append the separator if needed.
        if (i>0) {
          serializeStream.write(serdeParams.separators[0]);
        }
        // Get the field objectInspector and the field object.
        ObjectInspector foi = fields.get(i).getFieldObjectInspector();
        Object f = (list == null ? null : list.get(i));

        if (declaredFields != null && i >= declaredFields.size()) {
          throw new SerDeException(
              "Error: expecting " + declaredFields.size()
              + " but asking for field " + i + "\n" + "data=" + obj + "\n"
              + "tableType=" + serdeParams.rowTypeInfo.toString() + "\n"
              + "dataType="
              + TypeInfoUtils.getTypeInfoFromObjectInspector(objInspector));
        }
       
        // If the field that is passed in is NOT a primitive, and either the
        // field is not declared (no schema was given at initialization), or
        // the field is declared as a primitive in initialization, serialize
        // the data to JSON string.  Otherwise serialize the data in the
        // delimited way.
        if (!foi.getCategory().equals(Category.PRIMITIVE)
            && (declaredFields == null ||
                declaredFields.get(i).getFieldObjectInspector().getCategory()
                .equals(Category.PRIMITIVE) || useJSONSerialize)) {
          serialize(serializeStream, SerDeUtils.getJSONString(f, foi),
              PrimitiveObjectInspectorFactory.javaStringObjectInspector,
              serdeParams.separators, 1, serdeParams.nullSequence,
              serdeParams.escaped, serdeParams.escapeChar,
              serdeParams.needsEscape);
        } else {
          serialize(serializeStream, f, foi, serdeParams.separators, 1,
              serdeParams.nullSequence, serdeParams.escaped, serdeParams.escapeChar,
              serdeParams.needsEscape);
        }
      }
    } catch (IOException e) {
      throw new SerDeException(e);
    }
    // TODO: The copy of data is unnecessary, but there is no work-around
    // since we cannot directly set the private byte[] field inside Text.
    serializeCache.set(serializeStream.getData(), 0,
        serializeStream.getCount());
View Full Code Here

    numColumns = columnNames.size();
   
    // All columns have to be of type STRING.
    for (int c = 0; c < numColumns; c++) {
      if (!columnTypes.get(c).equals(TypeInfoFactory.stringTypeInfo)) {
        throw new SerDeException(getClass().getName()
            + " only accepts string columns, but column[" + c
            + "] named " + columnNames.get(c) + " has type "
            + columnTypes.get(c));
      }
    }
View Full Code Here

 
  @Override
  public Object deserialize(Writable blob) throws SerDeException {

    if (inputPattern == null) {
      throw new SerDeException("This table does not have serde property \"input.regex\"!");
    }
    Text rowText = (Text)blob;
   
    Matcher m = inputPattern.matcher(rowText.toString());
   
View Full Code Here

TOP

Related Classes of org.apache.hadoop.hive.serde2.SerDeException

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.