Package org.apache.hadoop.hive.serde2.io

Examples of org.apache.hadoop.hive.serde2.io.HiveVarcharWritable
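
HiveVarcharWritable is the Writable wrapper that Hive's SerDe layer uses for VARCHAR values. Before the excerpts, here is a minimal usage sketch that relies only on methods visible in the code below (set, enforceMaxLength, getHiveVarchar); the concrete values are illustrative and not taken from any of the excerpted tests.

    // Minimal sketch: direct use of HiveVarcharWritable
    // (org.apache.hadoop.hive.serde2.io.HiveVarcharWritable,
    //  org.apache.hadoop.hive.common.type.HiveVarchar).
    HiveVarcharWritable w = new HiveVarcharWritable();
    w.set("hello world");                 // store a string value
    w.enforceMaxLength(5);                // truncate to varchar(5) semantics
    HiveVarchar hv = w.getHiveVarchar();  // read it back as a HiveVarchar
    System.out.println(hv.getValue());    // expected to print "hello"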


  @Test
  public void testVarcharPlusInt() throws HiveException {
    GenericUDFOPPlus udf = new GenericUDFOPPlus();

    // Varchar plus int
    HiveVarcharWritable left = new HiveVarcharWritable();
    left.set("123");
    IntWritable right = new IntWritable(456);
    ObjectInspector[] inputOIs = {
        PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector,
        PrimitiveObjectInspectorFactory.writableIntObjectInspector
    };
    // ...
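
The excerpt above stops once the input ObjectInspectors are built. A hedged sketch of how a GenericUDF test like this typically continues is shown below; initialize, DeferredJavaObject, and evaluate are standard GenericUDF test plumbing, while the result handling shown here is an assumption rather than code copied from the original test. The same pattern applies to the power and multiply excerpts further down.

    // Sketch only: typical continuation of the test above (not copied from it).
    PrimitiveObjectInspector resultOI = (PrimitiveObjectInspector) udf.initialize(inputOIs);
    DeferredObject[] args = {
        new DeferredJavaObject(left),
        new DeferredJavaObject(right)
    };
    Object result = udf.evaluate(args);
    // The varchar operand is treated numerically, so "123" + 456 should yield 579.
    System.out.println(resultOI.getPrimitiveJavaObject(result));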


  @Test
  public void testVarcharPowerInt() throws HiveException {
    GenericUDFPower udf = new GenericUDFPower();

    HiveVarcharWritable left = new HiveVarcharWritable();
    left.set("3.14");
    IntWritable right = new IntWritable(2);
    ObjectInspector[] inputOIs = {
        PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector,
        PrimitiveObjectInspectorFactory.writableIntObjectInspector
    };
    // ...

      this.maxLength = maxLength;
    }

    @Override
    Object next(Object previous) throws IOException {
      HiveVarcharWritable result = null;
      if (previous == null) {
        result = new HiveVarcharWritable();
      } else {
        result = (HiveVarcharWritable) previous;
      }
      // Use the string reader implementation to populate the internal Text value
      Object textVal = super.next(result.getTextValue());
      if (textVal == null) {
        return null;
      }
      // result should now hold the value that was read in.
      // enforce varchar length
      result.enforceMaxLength(maxLength);
      return result;
    }
    // ...

          break;
        case CHAR:
          returnValue = new HiveCharWritable();
          break;
        case VARCHAR:
          returnValue = new HiveVarcharWritable();
          break;
        default:
          throw new UDFArgumentException("Unexpected non-string type " + type);
      }
    }
    // ...
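
The switch above allocates the writable that matches a string-family type. For context, a self-contained version of that dispatch might look like the hypothetical helper below; only the CHAR, VARCHAR, and default bodies come from the excerpt, while the STRING case and the method name and signature are assumptions.

  // Hypothetical helper (not the original method): allocate the writable
  // that matches a string-family primitive category.
  private static Writable getStringFamilyWritable(PrimitiveCategory type)
      throws UDFArgumentException {
    switch (type) {
      case STRING:
        return new Text();                 // org.apache.hadoop.io.Text
      case CHAR:
        return new HiveCharWritable();
      case VARCHAR:
        return new HiveVarcharWritable();
      default:
        throw new UDFArgumentException("Unexpected non-string type " + type);
    }
  }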

  @Test
  public void testVarchar() throws HiveException {
    GenericUDFOPNegative udf = new GenericUDFOPNegative();

    HiveVarchar vc = new HiveVarchar("32300.004747", 12);
    HiveVarcharWritable input = new HiveVarcharWritable(vc);
    VarcharTypeInfo inputTypeInfo = TypeInfoFactory.getVarcharTypeInfo(12);
    ObjectInspector[] inputOIs = {
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo),
    };
    DeferredObject[] args = {
        // ...
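
This excerpt (and the ceil and positive ones that follow) cuts off while the DeferredObject array is being built. For the single-argument case, the continuation typically follows the same evaluate pattern sketched earlier; a short, hedged sketch:

    // Sketch only (not from the original test): initialize, wrap the single
    // varchar argument, and evaluate.
    udf.initialize(inputOIs);
    DeferredObject[] args = {
        new DeferredJavaObject(input)
    };
    Object result = udf.evaluate(args);  // "32300.004747" is handled as a numeric value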

  @Test
  public void testVarchar() throws HiveException {
    GenericUDFCeil udf = new GenericUDFCeil();

    HiveVarchar vc = new HiveVarchar("32300.004747", 12);
    HiveVarcharWritable input = new HiveVarcharWritable(vc);
    VarcharTypeInfo inputTypeInfo = TypeInfoFactory.getVarcharTypeInfo(12);
    ObjectInspector[] inputOIs = {
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo),
    };
    DeferredObject[] args = {
        // ...

  @Test
  public void testVarchar() throws HiveException {
    GenericUDFOPPositive udf = new GenericUDFOPPositive();

    HiveVarchar vc = new HiveVarchar("32300.004747", 12);
    HiveVarcharWritable input = new HiveVarcharWritable(vc);
    VarcharTypeInfo inputTypeInfo = TypeInfoFactory.getVarcharTypeInfo(12);
    ObjectInspector[] inputOIs = {
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo),
    };
    DeferredObject[] args = {
        // ...

  @Test
  public void testVarcharTimesInt() throws HiveException {
    GenericUDFOPMultiply udf = new GenericUDFOPMultiply();

    HiveVarcharWritable left = new HiveVarcharWritable();
    left.set("123");
    IntWritable right = new IntWritable(456);
    ObjectInspector[] inputOIs = {
        PrimitiveObjectInspectorFactory.writableHiveVarcharObjectInspector,
        PrimitiveObjectInspectorFactory.writableIntObjectInspector
    };
    // ...

  // Wraps the raw bytes in the Writable that matches the given TypeInfo
  // (Text for STRING, HiveVarcharWritable for VARCHAR, BytesWritable for BINARY).
  private Writable getWritableValue(TypeInfo ti, byte[] value) {
    if (ti.equals(TypeInfoFactory.stringTypeInfo)) {
      return new Text(value);
    } else if (ti.equals(TypeInfoFactory.varcharTypeInfo)) {
      return new HiveVarcharWritable(
          new HiveVarchar(new Text(value).toString(), -1));
    } else if (ti.equals(TypeInfoFactory.binaryTypeInfo)) {
      return new BytesWritable(value);
    }
    return null;
    // ...
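
A hypothetical call to the helper above (illustrative values only, assumed to run inside the same class since the method is private); note that the -1 passed to the HiveVarchar constructor means no maximum length is enforced, so the full string is kept.

    // Hypothetical usage of getWritableValue (illustrative values only).
    byte[] bytes = "varchar value".getBytes(StandardCharsets.UTF_8);
    Writable w = getWritableValue(TypeInfoFactory.varcharTypeInfo, bytes);
    // w is a HiveVarcharWritable; the -1 length in the helper means no truncation.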

        HiveCharWritable t1 = ((HiveCharObjectInspector)poi1).getPrimitiveWritableObject(o1);
        HiveCharWritable t2 = ((HiveCharObjectInspector)poi2).getPrimitiveWritableObject(o2);
        return t1.compareTo(t2);
      }
      case VARCHAR: {
        HiveVarcharWritable t1 = ((HiveVarcharObjectInspector)poi1).getPrimitiveWritableObject(o1);
        HiveVarcharWritable t2 = ((HiveVarcharObjectInspector)poi2).getPrimitiveWritableObject(o2);
        return t1.compareTo(t2);
      }
      case BINARY: {
        BytesWritable bw1 = ((BinaryObjectInspector) poi1).getPrimitiveWritableObject(o1);
        BytesWritable bw2 = ((BinaryObjectInspector) poi2).getPrimitiveWritableObject(o2);
        // ...
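
The VARCHAR branch above delegates to HiveVarcharWritable.compareTo, which orders values by their underlying character data. A minimal illustration with made-up values:

    // Minimal illustration of the compareTo used in the VARCHAR branch above.
    HiveVarcharWritable a = new HiveVarcharWritable();
    a.set("apple");
    HiveVarcharWritable b = new HiveVarcharWritable();
    b.set("banana");
    System.out.println(a.compareTo(b) < 0);  // true: "apple" sorts before "banana"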
