Package org.apache.hadoop.hive.serde2.typeinfo

Examples of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo.precision()
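
DecimalTypeInfo carries the type parameters of a Hive decimal(p, s) column: precision() returns the total number of significant digits and scale() returns the number of digits after the decimal point. As a minimal sketch of obtaining a type info and reading its parameters (the TypeInfoFactory.getDecimalTypeInfo factory also appears in the snippets below; the class name and the chosen parameters 10 and 2 are illustrative):

import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class DecimalTypeInfoExample {
  public static void main(String[] args) {
    // Look up the interned type info instance for decimal(10, 2).
    DecimalTypeInfo dti = TypeInfoFactory.getDecimalTypeInfo(10, 2);
    System.out.println(dti.precision()); // 10: total significant digits
    System.out.println(dti.scale());     // 2: digits after the decimal point
  }
}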


        }
        return Boolean.parseBoolean(pigObj.toString());
      case DECIMAL:
        // Reject values whose precision or scale exceeds the declared decimal(p, s) type.
        BigDecimal bd = (BigDecimal) pigObj;
        DecimalTypeInfo dti = (DecimalTypeInfo) hcatFS.getTypeInfo();
        if (bd.precision() > dti.precision() || bd.scale() > dti.scale()) {
          handleOutOfRangeValue(pigObj, hcatFS);
          return null;
        }
        return HiveDecimal.create(bd);
      case CHAR:
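The DECIMAL branch above flags a value as out of range when either its precision or its scale exceeds the declared type parameters. A hedged illustration of the same check (reusing the imports from the sketch above plus java.math.BigDecimal; the literal values are made up):

// "123.456" has precision 6 and scale 3.
BigDecimal bd = new BigDecimal("123.456");
DecimalTypeInfo dti = TypeInfoFactory.getDecimalTypeInfo(5, 2); // decimal(5, 2)
// true: 6 > 5 and 3 > 2, so the value would be rejected.
boolean outOfRange = bd.precision() > dti.precision() || bd.scale() > dti.scale();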


    DecimalTypeInfo typeInfo = (DecimalTypeInfo) oi.getTypeInfo();
    if (typeInfo == null) {
      throw new RuntimeException("Decimal type used without type params");
    }

    precision = typeInfo.precision();
    scale = typeInfo.scale();
    data = new HiveDecimalWritable();
  }

  public LazyHiveDecimal(LazyHiveDecimal copy) {

    // A little inefficiency here as we need to create a HiveDecimal instance from the writable and
    // recreate a HiveDecimalWritable instance on the HiveDecimal instance. However, we don't know
    // the precision/scale of the original writable until we get a HiveDecimal instance from it.
    DecimalTypeInfo decTypeInfo = (DecimalTypeInfo) typeInfo;
    HiveDecimal dec = value == null ? null :
      value.getHiveDecimal(decTypeInfo.precision(), decTypeInfo.scale());
    if (dec == null) {
      return null;
    }
    return new HiveDecimalWritable(dec);
  }
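Note that getHiveDecimal(precision, scale) enforces the declared type parameters and returns null when the value cannot be represented within them, which is why the result is null-checked before being wrapped in a fresh HiveDecimalWritable.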

  LazyBinaryHiveDecimal(WritableHiveDecimalObjectInspector oi) {
    super(oi);

    DecimalTypeInfo typeInfo = (DecimalTypeInfo) oi.getTypeInfo();
    this.precision = typeInfo.precision();
    this.scale = typeInfo.scale();
    data = new HiveDecimalWritable();
  }

  LazyBinaryHiveDecimal(LazyBinaryHiveDecimal copy) {

      // expected type for the row-mode aggregation
      // For decimal, the type is "same number of integer digits and 4 more decimal digits"
     
      DecimalTypeInfo dtiSum = GenericUDAFAverage.deriveSumFieldTypeInfo(inputPrecision, inputScale);
      this.sumScale = (short) dtiSum.scale();
      this.sumPrecision = (short) dtiSum.precision();
     
      List<ObjectInspector> foi = new ArrayList<ObjectInspector>();
      foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
      foi.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(dtiSum));
      List<String> fname = new ArrayList<String>();
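Per the comment above, the sum type keeps the input's integer digits and gains four fractional digits. For example, an input of decimal(10, 2) has 8 integer digits, so the derived sum type would be decimal(14, 6) (8 integer digits + 6 fractional digits), subject to Hive's maximum decimal precision of 38.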

  @Override
  public void init(AggregationDesc desc) throws HiveException {
    ExprNodeDesc inputExpr = desc.getParameters().get(0);
    DecimalTypeInfo tiInput = (DecimalTypeInfo) inputExpr.getTypeInfo();
    this.inputScale = (short) tiInput.scale();
    this.inputPrecision = (short) tiInput.precision();

    initPartialResultInspector();
  }

  public VectorExpression getInputExpression() {

    PrimitiveTypeInfo resultTypeInfo = null;
    PrimitiveTypeInfo inputTypeInfo = inputOI.getTypeInfo();
    if (inputTypeInfo instanceof DecimalTypeInfo) {
      DecimalTypeInfo decTypeInfo = (DecimalTypeInfo) inputTypeInfo;
      resultTypeInfo = TypeInfoFactory.getDecimalTypeInfo(
          decTypeInfo.precision() - decTypeInfo.scale() + 1, 0);
      ObjectInspector decimalOI = PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(decTypeInfo);
      converter = ObjectInspectorConverters.getConverter(inputOI, decimalOI);
    } else {
      resultTypeInfo = TypeInfoFactory.longTypeInfo;
      ObjectInspector doubleObjectInspector = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
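The decimal branch above builds the result type as decimal(precision - scale + 1, 0). For an input of decimal(10, 2) that is decimal(9, 0): the integer part needs at most 8 digits, plus one extra digit for the carry when rounding pushes a value such as 99999999.99 up to 100000000.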

            type.setKind(OrcProto.Type.Kind.DATE);
            break;
          case DECIMAL:
            DecimalTypeInfo decTypeInfo = (DecimalTypeInfo)
                ((PrimitiveObjectInspector) treeWriter.inspector).getTypeInfo();
            type.setKind(OrcProto.Type.Kind.DECIMAL);
            type.setPrecision(decTypeInfo.precision());
            type.setScale(decTypeInfo.scale());
            break;
          default:
            throw new IllegalArgumentException("Unknown primitive category: " +
              ((PrimitiveObjectInspector) treeWriter.inspector).

            result.cols[j] = new BytesColumnVector(VectorizedRowBatch.DEFAULT_SIZE);
            break;
          case DECIMAL:
            DecimalTypeInfo tInfo = (DecimalTypeInfo) poi.getTypeInfo();
            result.cols[j] = new DecimalColumnVector(VectorizedRowBatch.DEFAULT_SIZE,
                tInfo.precision(), tInfo.scale());
            break;
          default:
            throw new RuntimeException("Vectorizaton is not supported for datatype:"
                + poi.getPrimitiveCategory());
          }

