Package org.apache.hadoop.hive.serde2.typeinfo

Examples of org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo
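
Most of the excerpts below follow the same pattern: obtain a DecimalTypeInfo, either by casting a generic TypeInfo or through TypeInfoFactory.getDecimalTypeInfo(precision, scale), and then read its precision() and scale() back. A minimal sketch of that pattern, with illustrative values only:

    // Minimal sketch of the common pattern in the examples on this page.
    // The (38, 18) parameters are illustrative, matching the test snippets below.
    DecimalTypeInfo typeInfo = TypeInfoFactory.getDecimalTypeInfo(38, 18);
    int precision = typeInfo.precision(); // 38
    int scale = typeInfo.scale();         // 18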


        case CHAR:
        case VARCHAR:
          cvList.add(new BytesColumnVector(VectorizedRowBatch.DEFAULT_SIZE));
          break;
        case DECIMAL:
          DecimalTypeInfo tInfo = (DecimalTypeInfo) poi.getTypeInfo();
          cvList.add(new DecimalColumnVector(VectorizedRowBatch.DEFAULT_SIZE,
              tInfo.precision(), tInfo.scale()));
          break;
        default:
          throw new HiveException("Vectorizaton is not supported for datatype:"
              + poi.getPrimitiveCategory());
        }
View Full Code Here


    private void initPartialResultInspector() {
      // the output type of the vectorized partial aggregate must match the
      // expected type for the row-mode aggregation
      // For decimal, the type is "same number of integer digits and 4 more decimal digits"
     
      DecimalTypeInfo dtiSum = GenericUDAFAverage.deriveSumFieldTypeInfo(inputPrecision, inputScale);
      this.sumScale = (short) dtiSum.scale();
      this.sumPrecision = (short) dtiSum.precision();
     
      List<ObjectInspector> foi = new ArrayList<ObjectInspector>();
      foi.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
      foi.add(PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(dtiSum));
      List<String> fname = new ArrayList<String>();
View Full Code Here
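
The comment in the snippet above states the derivation rule in words; here is a small worked sketch of that rule for a hypothetical decimal(10,2) input. The real logic lives in GenericUDAFAverage.deriveSumFieldTypeInfo and may additionally cap the result at HiveDecimal.MAX_PRECISION (38).

    // Worked illustration only: "same number of integer digits and 4 more decimal digits".
    int inputPrecision = 10, inputScale = 2;                      // decimal(10,2): 8 integer digits
    int sumScale = inputScale + 4;                                // 4 more decimal digits -> 6
    int sumPrecision = (inputPrecision - inputScale) + sumScale;  // same integer digits  -> 14
    DecimalTypeInfo dtiSum = TypeInfoFactory.getDecimalTypeInfo(sumPrecision, sumScale); // decimal(14,6)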

  }

  @Override
  public void init(AggregationDesc desc) throws HiveException {
    ExprNodeDesc inputExpr = desc.getParameters().get(0);
    DecimalTypeInfo tiInput = (DecimalTypeInfo) inputExpr.getTypeInfo();
    this.inputScale = (short) tiInput.scale();
    this.inputPrecision = (short) tiInput.precision();

    initPartialResultInspector();
  }
View Full Code Here

              if (genericUDF != null) {
                ((SettableUDF)genericUDF).setTypeInfo(varcharTypeInfo);
              }
              break;
            case HiveParser.TOK_DECIMAL:
              DecimalTypeInfo decTypeInfo = ParseUtils.getDecimalTypeTypeInfo(funcNameNode);
              if (genericUDF != null) {
                ((SettableUDF)genericUDF).setTypeInfo(decTypeInfo);
              }
              break;
            default:
View Full Code Here

    testWriterLong(TypeInfoFactory.longTypeInfo);
  }

  @Test
  public void testVectorExpressionWriterDecimal() throws HiveException {
    DecimalTypeInfo typeInfo = TypeInfoFactory.getDecimalTypeInfo(38, 18);
    testWriterDecimal(typeInfo);
  }
View Full Code Here

    testWriterDecimal(typeInfo);
  }

  @Test
  public void testVectorExpressionSetterDecimal() throws HiveException {
    DecimalTypeInfo typeInfo = TypeInfoFactory.getDecimalTypeInfo(38, 18);
    testSetterDecimal(typeInfo);
  }
View Full Code Here

      aggregation.sum = HiveDecimal.ZERO;
    }

    @Override
    protected ObjectInspector getSumFieldJavaObjectInspector() {
      DecimalTypeInfo typeInfo = deriveResultDecimalTypeInfo();
      return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(typeInfo);
    }
View Full Code Here

      return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(typeInfo);
    }

    @Override
    protected ObjectInspector getSumFieldWritableObjectInspector() {
      DecimalTypeInfo typeInfo = deriveResultDecimalTypeInfo();
      return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo);
    }
View Full Code Here
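
The two overrides above hand out a Java-backed and a Writable-backed inspector for the same derived decimal type. A minimal sketch of how the two factory calls pair up, assuming a decimal(38,18) type as in the tests earlier on this page:

    // Illustrative: both inspectors are built from one DecimalTypeInfo.
    DecimalTypeInfo typeInfo = TypeInfoFactory.getDecimalTypeInfo(38, 18);
    ObjectInspector javaOI =
        PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(typeInfo);
    ObjectInspector writableOI =
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(typeInfo);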

      super.init(m, parameters);
      result = new HiveDecimalWritable(HiveDecimal.ZERO);
      inputOI = (PrimitiveObjectInspector) parameters[0];
      // The output precision is 10 greater than the input which should cover at least
      // 10b rows. The scale is the same as the input.
      DecimalTypeInfo outputTypeInfo = null;
      if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
        int precision = Math.min(HiveDecimal.MAX_PRECISION, inputOI.precision() + 10);
        outputTypeInfo = TypeInfoFactory.getDecimalTypeInfo(precision, inputOI.scale());
      } else {
        outputTypeInfo = (DecimalTypeInfo) inputOI.getTypeInfo();
View Full Code Here
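
To make the precision bump in the snippet above concrete, a small sketch that applies the same formula to a hypothetical decimal(20,4) input; HiveDecimal.MAX_PRECISION is 38:

    // Illustrative only: the output keeps the input scale and adds 10 digits of precision,
    // capped at HiveDecimal.MAX_PRECISION (38).
    DecimalTypeInfo inputTypeInfo = TypeInfoFactory.getDecimalTypeInfo(20, 4);
    int precision = Math.min(HiveDecimal.MAX_PRECISION, inputTypeInfo.precision() + 10); // min(38, 30) = 30
    DecimalTypeInfo outputTypeInfo =
        TypeInfoFactory.getDecimalTypeInfo(precision, inputTypeInfo.scale());            // decimal(30,4)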

  private final int precision;
  private final int scale;

  public LazyHiveDecimal(LazyHiveDecimalObjectInspector oi) {
    super(oi);
    DecimalTypeInfo typeInfo = (DecimalTypeInfo)oi.getTypeInfo();
    if (typeInfo == null) {
      throw new RuntimeException("Decimal type used without type params");
    }

    precision = typeInfo.precision();
    scale = typeInfo.scale();
    data = new HiveDecimalWritable();
  }
View Full Code Here
