Package org.apache.hadoop.hive.serde2

Examples of org.apache.hadoop.hive.serde2.SerDeException
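SerDeException is the checked exception that Hive SerDe implementations throw from initialize(), serialize(), and deserialize() when table properties are invalid or a record cannot be converted. The fragments below show the two common patterns: throwing it directly with a diagnostic message, and wrapping a lower-level exception as the cause. A minimal sketch of both patterns (the class and helper names here are hypothetical, not taken from any of the examples):

  import java.io.IOException;
  import org.apache.hadoop.hive.serde2.SerDeException;

  public class ExampleSerDeHelper {
    // Hypothetical helper illustrating the two throw styles.
    static byte[] encode(String row, String formatProperty) throws SerDeException {
      if (formatProperty == null) {
        // Pattern 1: throw directly with a diagnostic message.
        throw new SerDeException("required SerDe property is not set");
      }
      try {
        return row.getBytes("UTF-8");
      } catch (IOException e) {
        // Pattern 2: wrap the underlying cause.
        throw new SerDeException(e);
      }
    }
  }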


  @Override
  public Writable serialize(Object obj, ObjectInspector objInspector)
      throws SerDeException {
   
    if (outputFormatString == null) {
      throw new SerDeException("Cannot write data into table because \"output.format.string\""
          + " is not specified in serde properties of the table.");
    }
   
    // Get all the fields out.
    // NOTE: The correct way to get fields out of the row is to use objInspector.
    // The obj can be a Java ArrayList, a Java class, a byte[], or any other
    // representation; the only way to access the data inside obj is through
    // the ObjectInspector.
   
    StructObjectInspector outputRowOI = (StructObjectInspector) objInspector;
    List<? extends StructField> outputFieldRefs = outputRowOI.getAllStructFieldRefs();
    if (outputFieldRefs.size() != numColumns) {
      throw new SerDeException("Cannot serialize the object because there are "
          + outputFieldRefs.size() + " fields but the table has " + numColumns +
          " columns.");
    }
   
    // Get all data out.
    for (int c = 0; c < numColumns; c++) {
      Object field = outputRowOI.getStructFieldData(obj, outputFieldRefs.get(c));
      ObjectInspector fieldOI = outputFieldRefs.get(c).getFieldObjectInspector();
      // The data must be of string type.
      StringObjectInspector fieldStringOI = (StringObjectInspector) fieldOI;
      // Convert the field to a Java String; Hive may store string data as
      // String, Text, or another class, and getPrimitiveJavaObject normalizes it.
      outputFields[c] = fieldStringOI.getPrimitiveJavaObject(field);
    }
   
    // Format the String
    String outputRowString = null;
    try {
      outputRowString = String.format(outputFormatString, outputFields);
    } catch (MissingFormatArgumentException e) {
      throw new SerDeException("The table contains " + numColumns
          + " columns, but the outputFormatString is asking for more.", e);
    }
    outputRowText.set(outputRowString);
    return outputRowText;
  }
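In the fragment above, a mismatch between the table's columns and "output.format.string" surfaces as a MissingFormatArgumentException from String.format, which is then rethrown as a SerDeException. A standalone illustration of that failure mode in plain Java (independent of Hive):

  import java.util.MissingFormatArgumentException;

  public class FormatMismatchDemo {
    public static void main(String[] args) {
      Object[] fields = { "a", "b" };  // two "columns"
      try {
        // %3$s requests a third argument that the two-element array lacks.
        String.format("%1$s %2$s %3$s", fields);
      } catch (MissingFormatArgumentException e) {
        System.err.println("format string needs more columns: " + e.getMessage());
      }
    }
  }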


      if (bt == null) {
        bt = (DynamicSerDeStructBase) parse_tree.tables.get(type_name);
      }

      if (bt == null) {
        throw new SerDeException("Could not lookup table type " + type_name
            + " in this ddl: " + ddl);
      }

      bt.initialize();
    } catch (Exception e) {
      // Print the full stack trace for diagnostics, then wrap and rethrow.
      System.err.println(StringUtils.stringifyException(e));
      throw new SerDeException(e);
    }
  }

      }
      deserializeReuse = bt.deserialize(deserializeReuse, iprot_);
      return deserializeReuse;
    } catch (Exception e) {
      e.printStackTrace();
      throw new SerDeException(e);
    }
  }

      bos_.reset();
      bt.serialize(obj, objInspector, oprot_);
      oprot_.getTransport().flush();
    } catch (Exception e) {
      e.printStackTrace();
      throw new SerDeException(e);
    }
    ret.set(bos_.getData(), 0, bos_.getCount());
    return ret;
  }
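The three fragments above follow the same wrap-and-rethrow idiom, printing the stack trace before wrapping the exception. Since SerDeException extends Exception and provides cause-carrying constructors, the cause travels with the rethrown exception and the manual printing is optional. A sketch of the same deserialize path without it (bt and iprot_ are the fields from the fragment, assumed initialized):

  Object deserializeQuietly(Object reuse) throws SerDeException {
    try {
      return bt.deserialize(reuse, iprot_);
    } catch (Exception e) {
      // The cause is preserved on the SerDeException itself.
      throw new SerDeException("deserialization failed", e);
    }
  }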

      for (int i = 0; i < columnNames.size(); i++) {
        row.set(i, deserialize(inputByteBuffer, columnTypes.get(i),
            columnSortOrderIsDesc[i], row.get(i)));
      }
    } catch (IOException e) {
      throw new SerDeException(e);
    }

    return row;
  }

      TProtocolFactory tp = TReflectionUtils
          .getProtocolFactoryByName(protoName);
      tsd = new ThriftByteStreamTypedSerDe(recordClass, tp, tp);

    } catch (Exception e) {
      throw new SerDeException(e);
    }
  }

        return null;
      }
      byteArrayRef.setData(t.getBytes());
      cachedLazyBinaryStruct.init(byteArrayRef, 0, t.getLength());
    } else {
      throw new SerDeException(getClass().toString()
          + ": expects either BytesWritable or Text object!");
    }
    lastOperationSerialize = false;
    lastOperationDeserialize = true;
    return cachedLazyBinaryStruct;

  @Override
  public Writable serialize(Object obj, ObjectInspector objInspector)
      throws SerDeException {
    // make sure it is a struct record
    if (objInspector.getCategory() != Category.STRUCT) {
      throw new SerDeException(getClass().toString()
          + " can only serialize struct types, but we got: "
          + objInspector.getTypeName());
    }

    serializeByteStream.reset();

    numColumns = columnNames.size();

    // All columns have to be primitive.
    for (int c = 0; c < numColumns; c++) {
      if (columnTypes.get(c).getCategory() != Category.PRIMITIVE) {
        throw new SerDeException(getClass().getName()
            + " only accepts primitive columns, but column[" + c + "] named "
            + columnNames.get(c) + " has category "
            + columnTypes.get(c).getCategory());
      }
    }
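The primitive-only validation above can be exercised outside a SerDe as well. A sketch using Hive's TypeInfoUtils to parse a column-type string and run the same category check (the class name and type strings below are illustrative):

  import java.util.List;
  import org.apache.hadoop.hive.serde2.SerDeException;
  import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
  import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
  import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

  public class PrimitiveCheck {
    static void requirePrimitives(String typeString) throws SerDeException {
      List<TypeInfo> types = TypeInfoUtils.getTypeInfosFromTypeString(typeString);
      for (int c = 0; c < types.size(); c++) {
        if (types.get(c).getCategory() != Category.PRIMITIVE) {
          throw new SerDeException("column[" + c + "] has non-primitive category "
              + types.get(c).getCategory());
        }
      }
    }

    public static void main(String[] args) throws SerDeException {
      requirePrimitives("int,string");              // passes
      requirePrimitives("int,map<string,string>");  // throws SerDeException
    }
  }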

      // The next byte should be the end-of-record marker. Read it outside the
      // assert so the stream is consumed even when assertions are disabled.
      Type endOfRecord = tbIn.readTypeCode();
      assert endOfRecord == Type.ENDOFRECORD;

    } catch (IOException e) {
      throw new SerDeException(e);
    }

    return row;
  }
