Package org.apache.hadoop.hive.serde2

Examples of org.apache.hadoop.hive.serde2.SerDe.initialize()
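
All of the snippets below share the same basic pattern: build a java.util.Properties object carrying the column metadata (plus any serde-specific settings) and pass it to initialize() together with a Hadoop Configuration. The following is a minimal, self-contained sketch of that pattern; it uses LazySimpleSerDe and invented column names purely for illustration and is not taken from any of the excerpts on this page.

    import java.util.Properties;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde.serdeConstants;
    import org.apache.hadoop.hive.serde2.SerDe;
    import org.apache.hadoop.hive.serde2.SerDeException;
    import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
    import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;

    public class SerDeInitSketch {
      public static void main(String[] args) throws SerDeException {
        Configuration conf = new Configuration();

        // Column metadata travels in the table properties, exactly as the
        // "columns" / "columns.types" keys do in the OrcSerde examples below.
        Properties props = new Properties();
        props.setProperty(serdeConstants.LIST_COLUMNS, "x,y");             // "columns"
        props.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int:string"); // "columns.types"

        SerDe serde = new LazySimpleSerDe();
        serde.initialize(conf, props);

        // Once initialized, the serde can describe its row layout.
        StructObjectInspector oi = (StructObjectInspector) serde.getObjectInspector();
        System.out.println(oi.getTypeName());  // expected: struct<x:int,y:string>
      }
    }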


      return null;
    }
    // Instantiate the SerDe class declared on the table descriptor and
    // initialize it with the table's own properties.
    SerDe sd = (SerDe) ReflectionUtils.newInstance(desc.getDeserializerClass(),
        null);
    try {
      sd.initialize(null, desc.getProperties());
    } catch (SerDeException e) {
      e.printStackTrace();
      return null;
    }
    return sd;


    writer.write(serde.serialize(new MyRow(3,2), inspector));
    writer.close(true);
    // Create a fresh OrcSerde, initialize it with explicit column names and
    // types, and check its serialized class and ObjectInspector before
    // reading the file back.
    serde = new OrcSerde();
    properties.setProperty("columns", "x,y");
    properties.setProperty("columns.types", "int:int");
    serde.initialize(conf, properties);
    assertEquals(OrcSerde.OrcSerdeRow.class, serde.getSerializedClass());
    inspector = (StructObjectInspector) serde.getObjectInspector();
    assertEquals("struct<x:int,y:int>", inspector.getTypeName());
    InputFormat<?,?> in = new OrcInputFormat();
    FileInputFormat.setInputPaths(conf, testFilePath.toString());

        serde.serialize(new NestedRow(7,8,9), inspector));
    writer.close(Reporter.NULL);
    // Initialize the serde with a column whose type is a nested struct,
    // then split the file for reading.
    serde = new OrcSerde();
    properties.setProperty("columns", "z,r");
    properties.setProperty("columns.types", "int:struct<x:int,y:int>");
    serde.initialize(conf, properties);
    inspector = (StructObjectInspector) serde.getObjectInspector();
    InputFormat<?,?> in = new OrcInputFormat();
    FileInputFormat.setInputPaths(conf, testFilePath.toString());
    InputSplit[] splits = in.getSplits(conf, 1);
    assertEquals(1, splits.length);

            properties, Reporter.NULL);
    writer.close(true);
    properties.setProperty("columns", "x,y");
    properties.setProperty("columns.types", "int:int");
    // Initialize an OrcSerde from the same table properties and read the
    // file back through OrcInputFormat; getSerDeStats() is expected to be null.
    SerDe serde = new OrcSerde();
    serde.initialize(conf, properties);
    InputFormat<?,?> in = new OrcInputFormat();
    FileInputFormat.setInputPaths(conf, testFilePath.toString());
    InputSplit[] splits = in.getSplits(conf, 1);
    assertTrue(1 == splits.length);
    assertEquals(null, serde.getSerDeStats());

    writer.write(serde.serialize(new StringRow("colin"), inspector));
    writer.write(serde.serialize(new StringRow("miles"), inspector));
    writer.close(true);
    // Re-initialize the serde for the two string columns and verify the
    // resulting struct type name.
    serde = new OrcSerde();
    properties.setProperty("columns", "str,str2");
    serde.initialize(conf, properties);
    inspector = (StructObjectInspector) serde.getObjectInspector();
    assertEquals("struct<str:string,str2:string>", inspector.getTypeName());
    InputFormat<?,?> in = new OrcInputFormat();
    FileInputFormat.setInputPaths(conf, testFilePath.toString());
    InputSplit[] splits = in.getSplits(conf, 1);

      serDeProps.setProperty(serdeName, serdePropsMap.get(serdeName));
    }
    try {
      // Reflectively instantiate the configured SerDe class, initialize it
      // with the collected serde properties, and derive the partition output OI.
      SerDe serDe = ReflectionUtils.newInstance(hConf.getClassByName(serdeClassName).
          asSubclass(SerDe.class), hConf);
      serDe.initialize(hConf, serDeProps);
      shp.setSerde(serDe);
      StructObjectInspector outOI = PTFPartition.setupPartitionOutputOI(serDe, OI);
      shp.setOI(outOI);
    } catch (Exception se) {
      throw new HiveException(se);

    Properties serdeProps = new Properties();
    // The serialization format and null-format properties are only carried
    // over when the serde is a DelimitedJSONSerDe.
    if (serde instanceof DelimitedJSONSerDe) {
      serdeProps.put(SERIALIZATION_FORMAT, props.getProperty(SERIALIZATION_FORMAT));
      serdeProps.put(SERIALIZATION_NULL_FORMAT, props.getProperty(SERIALIZATION_NULL_FORMAT));
    }
    serde.initialize(conf, serdeProps);
    return serde;
  }

  @Override
  public String convert(Object row, ObjectInspector rowOI) throws Exception {

    }
    try {
      // Build the key-side SerDe for the map join from the key table
      // descriptor and wrap it in a MapJoinObjectSerDeContext.
      TableDesc keyTableDesc = conf.getKeyTblDesc();
      SerDe keySerde = (SerDe) ReflectionUtils.newInstance(keyTableDesc.getDeserializerClass(),
          null);
      keySerde.initialize(null, keyTableDesc.getProperties());
      MapJoinObjectSerDeContext keyContext = new MapJoinObjectSerDeContext(keySerde, false);
      for (Byte pos : order) {
        if (pos == posBigTableAlias) {
          continue;
        }

          continue;
        }
        // For each small-table alias, build the value-side SerDe and pair it
        // with the key context in a MapJoinTableContainerSerDe.
        mapJoinTables[pos] = new HashMapWrapper(hashTableThreshold, hashTableLoadFactor);
        TableDesc valueTableDesc = conf.getValueTblFilteredDescs().get(pos);
        SerDe valueSerDe = (SerDe) ReflectionUtils.newInstance(valueTableDesc.getDeserializerClass(), null);
        valueSerDe.initialize(null, valueTableDesc.getProperties());
        mapJoinTableSerdes[pos] = new MapJoinTableContainerSerDe(keyContext, new MapJoinObjectSerDeContext(
            valueSerDe, hasFilter(pos)));
      }
    } catch (SerDeException e) {
      throw new HiveException(e);

    // Copy the column names and types out of the serde properties map under
    // the serdeConstants keys, then initialize.
    p.setProperty(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS,
        serdePropsMap.get(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS));
    p.setProperty(
        org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES,
        serdePropsMap.get(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES));
    serDe.initialize(cfg, p);
    return serDe;
  }

  @SuppressWarnings({"unchecked"})
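
The excerpts above all stop right after initialize(). As a hedged follow-up sketch (again built on LazySimpleSerDe with invented sample data, not code taken from any of the sources above), this is how a row is typically read back afterwards through deserialize() and the ObjectInspector that becomes available once initialize() has run:

    import java.util.Properties;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde.serdeConstants;
    import org.apache.hadoop.hive.serde2.SerDe;
    import org.apache.hadoop.hive.serde2.SerDeException;
    import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
    import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.StructField;
    import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
    import org.apache.hadoop.io.Text;

    public class SerDeDeserializeSketch {
      public static void main(String[] args) throws SerDeException {
        Properties props = new Properties();
        props.setProperty(serdeConstants.LIST_COLUMNS, "id,name");
        props.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int:string");
        props.setProperty(serdeConstants.FIELD_DELIM, ",");

        SerDe serde = new LazySimpleSerDe();
        serde.initialize(new Configuration(), props);

        // deserialize() returns a lazily parsed row; the struct ObjectInspector
        // obtained after initialize() knows how to pull the fields out of it.
        Object row = serde.deserialize(new Text("7,hello"));
        StructObjectInspector oi = (StructObjectInspector) serde.getObjectInspector();
        for (StructField field : oi.getAllStructFieldRefs()) {
          Object lazyData = oi.getStructFieldData(row, field);
          PrimitiveObjectInspector fieldOI =
              (PrimitiveObjectInspector) field.getFieldObjectInspector();
          // Convert the lazy wrapper into a plain Java value (Integer, String, ...).
          System.out.println(field.getFieldName() + " = "
              + fieldOI.getPrimitiveJavaObject(lazyData));
        }
      }
    }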
