Examples of HiveDecimal


Examples of org.apache.hadoop.hive.common.type.HiveDecimal

    if (o == null) {
      return null;
    }

    if (o instanceof String) {
      HiveDecimal dec = enforcePrecisionScale(HiveDecimal.create((String)o));
      return dec == null ? null : new HiveDecimalWritable(dec);
    }

    HiveDecimal dec = enforcePrecisionScale((HiveDecimal)o);
    return dec == null ? null : new HiveDecimalWritable(dec);
  }
View Full Code Here

Examples of org.apache.hadoop.hive.common.type.HiveDecimal

    // We need to enforce precision/scale here.
    // A little inefficiency here as we need to create a HiveDecimal instance from the writable and
    // recreate a HiveDecimalWritable instance on the HiveDecimal instance. However, we don't know
    // the precision/scale of the original writable until we get a HiveDecimal instance from it.
    DecimalTypeInfo decTypeInfo = (DecimalTypeInfo)typeInfo;
    HiveDecimal dec = value == null ? null :
      value.getHiveDecimal(decTypeInfo.precision(), decTypeInfo.scale());
    if (dec == null) {
      return null;
    }
    return new HiveDecimalWritable(dec);
View Full Code Here

Examples of org.apache.hadoop.hive.common.type.HiveDecimal

      DecimalTypeInfo typeInfo) {
    if (writable == null) {
      return null;
    }

    HiveDecimal dec = enforcePrecisionScale(writable.getHiveDecimal(), typeInfo);
    return dec == null ? null : new HiveDecimalWritable(dec);
  }
View Full Code Here

Examples of org.apache.hadoop.hive.common.type.HiveDecimal

      int precision, int scale) {
    if (writable == null) {
      return null;
    }

    HiveDecimal dec = enforcePrecisionScale(writable.getHiveDecimal(), precision, scale);
    return dec == null ? null : new HiveDecimalWritable(dec);
  }
View Full Code Here

Examples of org.apache.hadoop.hive.common.type.HiveDecimal

  }

  @Override
  public void init(ByteArrayRef bytes, int start, int length) {
    data.setFromBytes(bytes.getData(), start, length);
    HiveDecimal dec = data.getHiveDecimal(precision, scale);
    data = dec == null ? null : new HiveDecimalWritable(dec);
  }
View Full Code Here

Examples of org.apache.hadoop.hive.common.type.HiveDecimal

  public HiveDecimal getPrimitiveJavaObject(Object o) {
    if (o == null) {
      return null;
    }

    HiveDecimal dec = ((LazyHiveDecimal)o).getWritableObject().getHiveDecimal();
    return HiveDecimalUtils.enforcePrecisionScale(dec, (DecimalTypeInfo) typeInfo);
  }
View Full Code Here

Examples of org.apache.hadoop.hive.common.type.HiveDecimal

        // read the null byte again
        buffer.read(positive ? invert : !invert);

        String digits = new String(decimalBuffer, 0, length, decimalCharSet);
        BigInteger bi = new BigInteger(digits);
        HiveDecimal bd = HiveDecimal.create(bi).scaleByPowerOfTen(factor-length);

        if (!positive) {
          bd = bd.negate();
        }

        bdw.set(bd);
        return bdw;
      }
View Full Code Here

Examples of org.apache.hadoop.hive.common.type.HiveDecimal

        // Sign is 3 (bigger than 0)
        // Factor is -2 (move decimal point 2 positions right)
        // Digits are: 123

        HiveDecimalObjectInspector boi = (HiveDecimalObjectInspector) poi;
        HiveDecimal dec = boi.getPrimitiveJavaObject(o);

        // get the sign of the big decimal
        int sign = dec.compareTo(HiveDecimal.ZERO);

    // we'll encode the absolute value (sign is separate)
    dec = dec.abs();

    // get the scale factor to turn big decimal into a decimal < 1
    int factor = dec.precision() - dec.scale();
    factor = sign == 1 ? factor : -factor;

        // convert the absolute big decimal to string
        dec.scaleByPowerOfTen(Math.abs(dec.scale()));
        String digits = dec.unscaledValue().toString();

        // finally write out the pieces (sign, scale, digits)
        buffer.write((byte) ( sign + 1), invert);
        buffer.write((byte) ((factor >> 24) ^ 0x80), invert);
        buffer.write((byte) ( factor >> 16), invert);
View Full Code Here

Examples of org.apache.hadoop.hive.common.type.HiveDecimal

  public void testDecimalToTimestampCornerCases() {
    Timestamp ts = new Timestamp(parseToMillis("1969-03-04 05:44:33"));
    assertEquals(0, ts.getTime() % 1000);
    for (int nanos : new int[] { 100000, 900000, 999100000, 999900000 }) {
      ts.setNanos(nanos);
      HiveDecimal d = timestampToDecimal(ts);
      assertEquals(ts, TimestampWritable.decimalToTimestamp(d));
      assertEquals(ts, TimestampWritable.doubleToTimestamp(d.bigDecimalValue().doubleValue()));
    }
  }
View Full Code Here

Examples of org.apache.hadoop.hive.common.type.HiveDecimal

      int bufferUsed = dec.fastSerializeForHiveDecimal(scratch);
      HiveDecimalWritable hdw = new HiveDecimalWritable();
      hdw.set(scratch.getBytes(bufferUsed), dec.getScale());

      HiveDecimal hd = hdw.getHiveDecimal();

      BigDecimal readValue = hd.bigDecimalValue();

      Assert.assertEquals(value, readValue);

      // Now test fastUpdate from the same serialized HiveDecimal
      Decimal128 decRead = new Decimal128().fastUpdateFromInternalStorage(
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Inc. Contact coftware#gmail.com.