Package org.apache.hadoop.hive.serde2

Examples of org.apache.hadoop.hive.serde2.SerDeException
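
SerDeException is the checked exception used throughout Hive's serde2 (serializer/deserializer) layer. Every entry point of the SerDe contract declares it, so schema errors, unsupported input types, and wrapped lower-level IOExceptions all surface through it. The excerpts below, taken from Hive's own SerDe implementations, show the recurring usage patterns. For orientation, here is an abridged sketch of the contract; it mirrors Hive's Deserializer and Serializer interfaces, though the interface name itself is illustrative:

import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.io.Writable;

// Abridged sketch of the SerDe contract: every entry point declares
// SerDeException, so all failure modes funnel through it.
interface SerDeContractSketch {
  void initialize(Configuration conf, Properties tbl) throws SerDeException;
  ObjectInspector getObjectInspector() throws SerDeException;
  Object deserialize(Writable blob) throws SerDeException;
  Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException;
}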


    if (field instanceof BytesWritable) {
      BytesWritable b = (BytesWritable) field;
      byteArrayRef.setData(b.get());
      cachedLazyStruct.init(byteArrayRef, 0, b.getSize());
    } else if (field instanceof Text) {
      Text t = (Text) field;
      // Point the reusable buffer at the Text's backing array; no copy is made.
      byteArrayRef.setData(t.getBytes());
      cachedLazyStruct.init(byteArrayRef, 0, t.getLength());
    } else {
      throw new SerDeException(getClass().toString()
          + ": expects either BytesWritable or Text object!");
    }
    // Record which direction the shared cached state currently represents.
    lastOperationSerialize = false;
    lastOperationDeserialize = true;
    return cachedLazyStruct;
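This is the tail of LazySimpleSerDe-style deserialization: one ByteArrayRef and one cached lazy struct are re-pointed at each incoming row's bytes, so no per-row copies or allocations are made, and the lastOperation flags track which direction the cached state belongs to. A stripped-down sketch of that buffer-reuse idea, where BufferRef is an illustrative stand-in for Hive's ByteArrayRef:

import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

class ReusingDeserializer {
  // Mutable holder re-pointed at each row's bytes instead of copying them.
  static final class BufferRef {
    byte[] data;
    int start;
    int length;
  }

  private final BufferRef ref = new BufferRef();

  Object deserialize(Writable field) throws SerDeException {
    if (field instanceof Text) {
      Text t = (Text) field;
      ref.data = t.getBytes();   // backing array, valid up to getLength()
      ref.start = 0;
      ref.length = t.getLength();
      return ref;
    }
    throw new SerDeException(getClass().toString() + ": expects Text object!");
  }
}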


  /**
   * Serialize a row of data, which must be struct-typed, into a Writable.
   */
  public Writable serialize(Object obj, ObjectInspector objInspector)
      throws SerDeException {

    if (objInspector.getCategory() != Category.STRUCT) {
      throw new SerDeException(getClass().toString()
          + " can only serialize struct types, but we got: "
          + objInspector.getTypeName());
    }

    // Prepare the field ObjectInspectors
    StructObjectInspector soi = (StructObjectInspector) objInspector;
    List<? extends StructField> fields = soi.getAllStructFieldRefs();
    List<Object> list = soi.getStructFieldsDataAsList(obj);
    // Validate against the table's declared schema when one is available.
    List<? extends StructField> declaredFields =
        (serdeParams.rowTypeInfo != null
            && ((StructTypeInfo) serdeParams.rowTypeInfo).getAllStructFieldNames().size() > 0)
        ? ((StructObjectInspector) getObjectInspector()).getAllStructFieldRefs()
        : null;

    serializeStream.reset();
    serializedSize = 0;

    // Serialize each field
    for (int i = 0; i < fields.size(); i++) {
      // Append the separator if needed.
      if (i > 0) {
        serializeStream.write(serdeParams.separators[0]);
      }
      // Get the field objectInspector and the field object.
      ObjectInspector foi = fields.get(i).getFieldObjectInspector();
      Object f = (list == null ? null : list.get(i));

      if (declaredFields != null && i >= declaredFields.size()) {
        throw new SerDeException("Error: expecting " + declaredFields.size()
            + " but asking for field " + i + "\n" + "data=" + obj + "\n"
            + "tableType=" + serdeParams.rowTypeInfo.toString() + "\n"
            + "dataType="
            + TypeInfoUtils.getTypeInfoFromObjectInspector(objInspector));
      }
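The method first rejects any non-struct row, then walks the fields, writing serdeParams.separators[0] (Hive's top-level field delimiter, Ctrl-A by default) between values; the declaredFields check guards against rows that are wider than the declared table schema. A stripped-down sketch of the separator logic over a plain stream, with an illustrative class name and separator constant:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.List;

class DelimitedFieldWriter {
  private static final byte FIELD_SEPARATOR = 1; // Ctrl-A, Hive's default

  // Write each field's text, inserting the separator between fields only.
  static byte[] writeRow(List<String> fieldValues) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    for (int i = 0; i < fieldValues.size(); i++) {
      if (i > 0) {
        out.write(FIELD_SEPARATOR);
      }
      out.write(fieldValues.get(i).getBytes(java.nio.charset.StandardCharsets.UTF_8));
    }
    return out.toByteArray();
  }
}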

  protected void serializeField(ByteStream.Output out, Object obj, ObjectInspector objInspector,
      SerDeParameters serdeParams) throws SerDeException {
    try {
      serialize(out, obj, objInspector, serdeParams.separators, 1, serdeParams.nullSequence,
          serdeParams.escaped, serdeParams.escapeChar, serdeParams.needsEscape);
    } catch (IOException e) {
      throw new SerDeException(e);
    }
  }
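The low-level writer deals in IOException, but the SerDe contract only declares SerDeException, so the wrapper rethrows with the cause attached; SerDeException provides both (String) and (Throwable) constructors, and passing the exception itself, as above, preserves the original stack trace. A minimal sketch of the idiom, with illustrative class and method names:

import java.io.IOException;
import java.io.OutputStream;
import org.apache.hadoop.hive.serde2.SerDeException;

class WrapIdiom {
  // Translate IOException into the SerDeException the contract declares,
  // keeping the original exception as the cause.
  static void writeField(OutputStream out, byte[] data) throws SerDeException {
    try {
      out.write(data);
    } catch (IOException e) {
      throw new SerDeException(e); // cause-preserving constructor
    }
  }
}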

      // Decode each column in order, reusing the previous row's cell object.
      for (int i = 0; i < columnNames.size(); i++) {
        row.set(i, deserialize(inputByteBuffer, columnTypes.get(i),
            columnSortOrderIsDesc[i], row.get(i)));
      }
    } catch (IOException e) {
      throw new SerDeException(e);
    }

    return row;
  }
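This is BinarySortableSerDe's row loop: each column is decoded according to its declared sort direction (columnSortOrderIsDesc), and the previous row's cell is passed back into deserialize so the object can be refilled rather than reallocated. A sketch of that reuse pattern, where ColumnReader and the class name are illustrative stand-ins:

import java.util.List;

class ReusableRow {
  interface ColumnReader {
    Object read(int column, Object reuse);
  }

  // Reuse the previous row's cells as scratch objects: each read may
  // refill the old object instead of allocating a new one.
  static List<Object> fill(List<Object> row, int numColumns, ColumnReader reader) {
    for (int i = 0; i < numColumns; i++) {
      row.set(i, reader.read(i, row.get(i))); // pass the old cell back in
    }
    return row;
  }
}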

    try {
      // matches() primes the matcher; on a non-matching row the group()
      // calls below throw, and the catch wraps that in a SerDeException.
      match.matches();
      c.bucketowner = match.group(t++);
      c.bucketname = match.group(t++);
    } catch (Exception e) {
      throw new SerDeException("S3 Log Regex did not match: " + row, e);
    }
    c.rdatetime = match.group(t++);

    // Should we convert the datetime to the format Hive understands by default
    // - either yyyy-mm-dd HH:MM:SS or seconds since epoch?
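The boolean result of match.matches() is deliberately ignored here: if the row does not match, the subsequent group() calls throw IllegalStateException, which the broad catch converts into a SerDeException carrying the offending row. Checking the boolean explicitly is the clearer variant, sketched below with an illustrative pattern and class name:

import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.hive.serde2.SerDeException;

class RegexRowParser {
  private static final Pattern LOG_LINE = Pattern.compile("(\\S+) (\\S+)"); // illustrative

  static String[] parse(String row) throws SerDeException {
    Matcher m = LOG_LINE.matcher(row);
    // Check the result up front instead of letting group() throw
    // IllegalStateException into a broad catch block.
    if (!m.matches()) {
      throw new SerDeException("Log regex did not match: " + row);
    }
    return new String[] { m.group(1), m.group(2) };
  }
}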

    if (field instanceof BytesWritable) {
      BytesWritable b = (BytesWritable) field;
      try {
        row = Text.decode(b.get(), 0, b.getSize());
      } catch (CharacterCodingException e) {
        throw new SerDeException(e);
      }
    } else if (field instanceof Text) {
      row = field.toString();
    }
    try {
      deserialize(deserializeCache, row);
      return deserializeCache;
    } catch (ClassCastException e) {
      throw new SerDeException(this.getClass().getName()
          + " expects Text or BytesWritable", e);
    } catch (Exception e) {
      throw new SerDeException(e);
    }
  }
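Text.decode performs strict UTF-8 validation, throwing CharacterCodingException on malformed input rather than silently substituting replacement characters the way new String(bytes, UTF_8) would; the exception is then wrapped in a SerDeException. (The snippet's get() and getSize() are the older names for getBytes() and getLength().) A self-contained sketch of the strict-decoding step, with an illustrative class name:

import java.nio.charset.CharacterCodingException;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;

class StrictUtf8 {
  // Text.decode rejects malformed UTF-8 instead of silently repairing it.
  static String decodeRow(BytesWritable b) throws SerDeException {
    try {
      return Text.decode(b.getBytes(), 0, b.getLength());
    } catch (CharacterCodingException e) {
      throw new SerDeException(e);
    }
  }
}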

    numColumns = columnNames.size();

    // All columns have to be primitive.
    for (int c = 0; c < numColumns; c++) {
      if (columnTypes.get(c).getCategory() != Category.PRIMITIVE) {
        throw new SerDeException(getClass().getName()
            + " only accepts primitive columns, but column[" + c + "] named "
            + columnNames.get(c) + " has category "
            + columnTypes.get(c).getCategory());
      }
    }
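Validating the declared column types once, at initialization, makes a bad table definition fail immediately instead of on the first row read, and the error message names both the offending column and its actual category. The same check as a standalone helper, with illustrative class and method names:

import java.util.List;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

class SchemaValidation {
  // Fail fast on non-primitive columns, reporting index, name, and category.
  static void requirePrimitives(List<String> names, List<TypeInfo> types)
      throws SerDeException {
    for (int c = 0; c < types.size(); c++) {
      if (types.get(c).getCategory() != Category.PRIMITIVE) {
        throw new SerDeException("column[" + c + "] named " + names.get(c)
            + " has non-primitive category " + types.get(c).getCategory());
      }
    }
  }
}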

      // The next byte should be the end-of-record marker. Note that the
      // read happens inside the assert, so it is skipped entirely when
      // assertions are disabled (the JVM default); see the sketch below.
      assert tbIn.readTypeCode() == Type.ENDOFRECORD;

    } catch (IOException e) {
      throw new SerDeException(e);
    }

    return row;
  }
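As the comment above notes, performing the read inside an assert is fragile: with assertions disabled, the statement is skipped entirely and the marker byte is never consumed from the stream. A sketch of an always-on check, using a plain DataInput as an illustrative stand-in for the Thrift protocol reader:

import java.io.DataInput;
import java.io.IOException;
import org.apache.hadoop.hive.serde2.SerDeException;

class MarkerCheck {
  // Read the trailing marker unconditionally and fail loudly on mismatch.
  static void expectEndOfRecord(DataInput in, byte endOfRecord)
      throws SerDeException {
    try {
      byte marker = in.readByte();
      if (marker != endOfRecord) {
        throw new SerDeException("expected end-of-record marker, got " + marker);
      }
    } catch (IOException e) {
      throw new SerDeException(e);
    }
  }
}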

      // End of the record is part of the data
      tbOut.writeEndOfRecord();

      serializeBytesWritable.set(barrStr.getData(), 0, barrStr.getLength());
    } catch (IOException e) {
      // Wrap the exception itself, not just its message, so the original
      // stack trace is preserved as the cause.
      throw new SerDeException(e);
    }
    return serializeBytesWritable;
  }
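The serialized bytes are copied into a single cached BytesWritable (serializeBytesWritable) that is reused across serialize() calls, so callers must consume or copy the result before asking for the next row. A sketch of that reuse convention, with an illustrative class name:

import org.apache.hadoop.io.BytesWritable;

class ReusedOutput {
  // One output Writable is reused across rows; set() copies the buffer
  // contents into it, so the returned object is only valid until the
  // next call.
  private final BytesWritable cached = new BytesWritable();

  BytesWritable wrap(byte[] buffer, int length) {
    cached.set(buffer, 0, length);
    return cached;
  }
}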

      if (bt == null) {
        bt = (DynamicSerDeStructBase) parse_tree.tables.get(type_name);
      }

      if (bt == null) {
        throw new SerDeException("Could not lookup table type " + type_name
            + " in this ddl: " + ddl);
      }

      bt.initialize();
    } catch (Exception e) {
      // Log the full stack trace before wrapping, since callers may only
      // surface the SerDeException's message.
      System.err.println(StringUtils.stringifyException(e));
      throw new SerDeException(e);
    }
  }
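DynamicSerDe resolves the row type by name from the parsed DDL, falling back to parse_tree.tables when the first lookup (elided above) misses, and fails with a message naming both the missing type and the full DDL, which makes misconfigured tables easy to diagnose. The lookup shape as a standalone sketch, with plain Maps standing in for the parse-tree collections:

import java.util.Map;
import org.apache.hadoop.hive.serde2.SerDeException;

class TypeLookup {
  // Two-stage lookup with a descriptive failure that names both the
  // missing key and the source it was expected in.
  static Object findType(Map<String, ?> structs, Map<String, ?> tables,
      String typeName, String ddl) throws SerDeException {
    Object bt = structs.get(typeName);
    if (bt == null) {
      bt = tables.get(typeName);
    }
    if (bt == null) {
      throw new SerDeException("Could not lookup table type " + typeName
          + " in this ddl: " + ddl);
    }
    return bt;
  }
}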
