Package org.apache.hadoop.hive.accumulo.columns

Examples of org.apache.hadoop.hive.accumulo.columns.ColumnMapping
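Every example below follows the same pattern: the caller checks which concrete ColumnMapping subclass it was handed (the Accumulo row ID, a whole column family, or a single family/qualifier) and acts accordingly. A minimal sketch of that dispatch, assuming only the Hive classes that appear in the excerpts; the ColumnMappingDispatch class and its describe method are illustrative, not part of Hive:

import org.apache.hadoop.hive.accumulo.columns.ColumnMapping;
import org.apache.hadoop.hive.accumulo.columns.HiveAccumuloColumnMapping;
import org.apache.hadoop.hive.accumulo.columns.HiveAccumuloMapColumnMapping;
import org.apache.hadoop.hive.accumulo.columns.HiveAccumuloRowIdColumnMapping;

public class ColumnMappingDispatch {
  // Illustrative helper: describes how a Hive column backed by this mapping is materialized.
  public static String describe(ColumnMapping mapping) {
    if (mapping instanceof HiveAccumuloRowIdColumnMapping) {
      return "Hive column backed by the Accumulo row ID";
    } else if (mapping instanceof HiveAccumuloMapColumnMapping) {
      return "Hive MAP column backed by an Accumulo column family";
    } else if (mapping instanceof HiveAccumuloColumnMapping) {
      return "Hive column backed by a single family/qualifier, encoding " + mapping.getEncoding();
    }
    throw new IllegalArgumentException("Unsupported ColumnMapping: " + mapping.getClass());
  }
}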


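  // Unit test: mocks a ColumnMapping and a StringObjectInspector to check that the serializer
  // resets its shared output buffer before writing a value.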
  @Test
  public void testBufferResetBeforeUse() throws IOException {
    ByteStream.Output output = new ByteStream.Output();
    PrimitiveObjectInspector fieldObjectInspector = Mockito.mock(StringObjectInspector.class);
    ColumnMapping mapping = Mockito.mock(ColumnMapping.class);

    // Write some garbage to the buffer that should be erased
    output.write("foobar".getBytes());

    // Stub out the serializer
    AccumuloRowSerializer serializer = Mockito.mock(AccumuloRowSerializer.class);

    String object = "hello";

    Mockito.when(
        serializer.getSerializedValue(Mockito.any(ObjectInspector.class), Mockito.any(),
            Mockito.any(ByteStream.Output.class), Mockito.any(ColumnMapping.class)))
        .thenCallRealMethod();

    Mockito.when(fieldObjectInspector.getCategory()).thenReturn(ObjectInspector.Category.PRIMITIVE);
    Mockito.when(fieldObjectInspector.getPrimitiveCategory()).thenReturn(PrimitiveCategory.STRING);
    Mockito.when(fieldObjectInspector.getPrimitiveWritableObject(Mockito.any(Object.class)))
        .thenReturn(new Text(object));
    Mockito.when(mapping.getEncoding()).thenReturn(ColumnEncoding.STRING);

    // Invoke the method
    serializer.getSerializedValue(fieldObjectInspector, object, output, mapping);

    // Verify the buffer was reset (real output doesn't happen because it was mocked)
    // (assertion assumed from the comment above; the excerpt was truncated here)
    Assert.assertEquals(0, output.size());
  }


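      // Excerpt from a serializer loop: each Hive field is written into the Accumulo Mutation
      // according to its concrete ColumnMapping subtype.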
      // The ObjectInspector for the current column
      fieldObjectInspector = field.getFieldObjectInspector();

      // Make sure we got the right implementation of a ColumnMapping
      ColumnMapping mapping = mappings.get(i);
      if (mapping instanceof HiveAccumuloColumnMapping) {
        serializeColumnMapping((HiveAccumuloColumnMapping) mapping, fieldObjectInspector, value,
            mutation);
      } else if (mapping instanceof HiveAccumuloMapColumnMapping) {
        serializeColumnMapping((HiveAccumuloMapColumnMapping) mapping, fieldObjectInspector, value,
            mutation);
      } else {
        throw new IllegalArgumentException("Mapping for " + field.getFieldName()
            + " was not a HiveColumnMapping, but was " + mapping.getClass());
      }

    }

    return mutation;

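    // Excerpt: builds an ObjectInspector per column, using the factory's row-ID inspector for a
    // HiveAccumuloRowIdColumnMapping and lazy object inspectors for every other column.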
      throws SerDeException {
    ArrayList<ObjectInspector> columnObjectInspectors = new ArrayList<ObjectInspector>(
        columnTypes.size());
    for (int i = 0; i < columnTypes.size(); i++) {
      TypeInfo type = columnTypes.get(i);
      ColumnMapping mapping = mappings.get(i);
      if (mapping instanceof HiveAccumuloRowIdColumnMapping) {
        columnObjectInspectors.add(factory.createRowIdObjectInspector(type));
      } else {
        columnObjectInspectors.add(LazyFactory.createLazyObjectInspector(type,
            serDeParams.getSeparators(), 1, serDeParams.getNullSequence(), serDeParams.isEscaped(),
            serDeParams.getEscapeChar()));  // final argument assumed; the excerpt was truncated here
      }
    }

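  // Excerpt from a lazy row: materializes field "id" from the Accumulo row, branching on the
  // ColumnMapping subtype.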
  /* split pairs by delimiter. */
  private Object uncheckedGetField(int id) {
    if (!getFieldInited()[id]) {
      ByteArrayRef ref;
      ColumnMapping columnMapping = columnMappings.get(id);

      if (columnMapping instanceof HiveAccumuloMapColumnMapping) {
        HiveAccumuloMapColumnMapping mapColumnMapping = (HiveAccumuloMapColumnMapping) columnMapping;

        LazyAccumuloMap map = (LazyAccumuloMap) getFields()[id];
        map.init(row, mapColumnMapping);
      } else {
        if (columnMapping instanceof HiveAccumuloRowIdColumnMapping) {
          // Use the rowID directly
          ref = new ByteArrayRef();
          ref.setData(row.getRowId().getBytes());
        } else if (columnMapping instanceof HiveAccumuloColumnMapping) {
          HiveAccumuloColumnMapping accumuloColumnMapping = (HiveAccumuloColumnMapping) columnMapping;

          // Use the colfam and colqual to get the value
          byte[] val = row.getValue(new Text(accumuloColumnMapping.getColumnFamily()), new Text(
              accumuloColumnMapping.getColumnQualifier()));
          if (val == null) {
            return null;
          } else {
            ref = new ByteArrayRef();
            ref.setData(val);
          }
        } else {
          log.error("Could not process ColumnMapping of type " + columnMapping.getClass()
              + " at offset " + id + " in column mapping: " + columnMapping.getMappingSpec());
          throw new IllegalArgumentException("Cannot process ColumnMapping of type "
              + columnMapping.getClass());
        }

        getFields()[id].init(ref, 0, ref.getData().length);
      }

      // (completion assumed; the excerpt was truncated here)
      getFieldInited()[id] = true;
    }

    return getFields()[id].getObject();
  }

    return cachedList;
  }

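  // Excerpt: creates the lazy object for a field, choosing the row-ID object, a LazyAccumuloMap,
  // or a plain lazy object based on the ColumnMapping.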
  @Override
  protected LazyObjectBase createLazyField(int fieldID, StructField fieldRef) throws SerDeException {
    final ColumnMapping columnMapping = columnMappings.get(fieldID);

    if (columnMapping instanceof HiveAccumuloRowIdColumnMapping) {
      return rowIdFactory.createRowId(fieldRef.getFieldObjectInspector());
    } else if (columnMapping instanceof HiveAccumuloMapColumnMapping) {
      return new LazyAccumuloMap((LazyMapObjectInspector) fieldRef.getFieldObjectInspector());
    } else {
      return LazyFactory.createLazyObject(fieldRef.getFieldObjectInspector(),
          ColumnEncoding.BINARY == columnMapping.getEncoding());
    }
  }