Package: com.cloudera.cdk.data

Examples of com.cloudera.cdk.data.SchemaValidationException


    ObjectMapper mapper = new ObjectMapper();
    JsonNode avroRecordSchemaJson;
    try {
      avroRecordSchemaJson = mapper.readValue(rawSchema, JsonNode.class);
    } catch (IOException e) {
      throw new SchemaValidationException(
          "Could not parse the avro record as JSON.", e);
    }
    return avroRecordSchemaJson;
  }
View Full Code Here


          // Only one value type for a map, so just put the type in the column
          // datum maps.
          initColumnDatumMaps(fieldName, fieldSchema.getValueType(),
              writtenFieldSchema.getValueType());
        } else {
          throw new SchemaValidationException(
              "Unsupported type for keyAsColumn: "
                  + fieldMapping.getMappingValue());
        }
      }
    }
View Full Code Here

  @Override
  public byte[] serializeColumnValueToBytes(String fieldName, Object columnValue) {
    Field field = avroSchema.getAvroSchema().getField(fieldName);
    DatumWriter<Object> datumWriter = fieldDatumWriters.get(fieldName);
    if (field == null) {
      throw new SchemaValidationException("Invalid field name " + fieldName
          + " for schema " + avroSchema.toString());
    }
    if (datumWriter == null) {
      throw new SchemaValidationException("No datum writer for field name: "
          + fieldName);
    }

    ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
    Encoder encoder = getColumnEncoder(field.schema(), byteOut);
View Full Code Here

  @Override
  public byte[] serializeKeyAsColumnValueToBytes(String fieldName,
      CharSequence columnKey, Object columnValue) {
    Field field = avroSchema.getAvroSchema().getField(fieldName);
    if (field == null) {
      throw new SchemaValidationException("Invalid field name " + fieldName
          + " for schema " + avroSchema.toString());
    }

    Schema.Type schemaType = field.schema().getType();
    if (schemaType == Schema.Type.MAP) {
      DatumWriter<Object> datumWriter = fieldDatumWriters.get(fieldName);
      if (datumWriter == null) {
        throw new SchemaValidationException("No datum writer for field name: "
            + fieldName);
      }
      return AvroUtils.writeAvroEntity(columnValue, datumWriter);
    } else if (schemaType == Schema.Type.RECORD) {
      if (!kacRecordDatumWriters.containsKey(fieldName)) {
        throw new SchemaValidationException("Invalid field name " + fieldName
            + " for schema " + avroSchema.toString());
      }
      if (!kacRecordDatumWriters.get(fieldName).containsKey(
          columnKey.toString())) {
        throw new SchemaValidationException("Invalid key in record: "
            + fieldName + "." + columnKey);
      }
      DatumWriter<Object> datumWriter = kacRecordDatumWriters.get(fieldName)
          .get(columnKey.toString());
      return AvroUtils.writeAvroEntity(columnValue, datumWriter);
    } else {
      throw new SchemaValidationException("Unsupported type for keyAsColumn: "
          + schemaType);
    }
  }
View Full Code Here

  @Override
  public Object deserializeColumnValueFromBytes(String fieldName, byte[] bytes) {
    Field field = avroSchema.getAvroSchema().getField(fieldName);
    DatumReader<Object> datumReader = fieldDatumReaders.get(fieldName);
    if (field == null) {
      throw new SchemaValidationException("Invalid field name " + fieldName
          + " for schema " + avroSchema.toString());
    }
    if (datumReader == null) {
      throw new SchemaValidationException("No datum reader for field name: "
          + fieldName);
    }

    ByteArrayInputStream byteIn = new ByteArrayInputStream(bytes);
    Decoder decoder = getColumnDecoder(field.schema(), byteIn);
View Full Code Here

  @Override
  public Object deserializeKeyAsColumnValueFromBytes(String fieldName,
      byte[] columnKeyBytes, byte[] columnValueBytes) {
    Field field = avroSchema.getAvroSchema().getField(fieldName);
    if (field == null) {
      throw new SchemaValidationException("Invalid field name " + fieldName
          + " for schema " + avroSchema.toString());
    }

    Schema.Type schemaType = field.schema().getType();
    if (schemaType == Schema.Type.MAP) {
      DatumReader<Object> datumReader = fieldDatumReaders.get(fieldName);
      if (datumReader == null) {
        throw new SchemaValidationException("No datum reader for field name: "
            + fieldName);
      }
      return AvroUtils.readAvroEntity(columnValueBytes, datumReader);
    } else if (schemaType == Schema.Type.RECORD) {
      if (!kacRecordDatumReaders.containsKey(fieldName)) {
        throw new SchemaValidationException("Invalid field name " + fieldName
            + " for schema " + avroSchema.toString());
      }
      String columnKey = new String(columnKeyBytes);
      if (!kacRecordDatumReaders.get(fieldName).containsKey(columnKey)) {
        throw new SchemaValidationException("Invalid key in record: "
            + fieldName + "." + columnKey);
      }
      DatumReader<Object> datumReader = kacRecordDatumReaders.get(fieldName)
          .get(columnKey);
      return AvroUtils.readAvroEntity(columnValueBytes, datumReader);
    } else {
      throw new SchemaValidationException("Unsupported type for keyAsColumn: "
          + schemaType);
    }
  }
View Full Code Here

  @Override
  public CharSequence deserializeKeyAsColumnKeyFromBytes(String fieldName,
      byte[] columnKeyBytes) {
    Field field = avroSchema.getAvroSchema().getField(fieldName);
    if (field == null) {
      throw new SchemaValidationException("Invalid field name " + fieldName
          + " for schema " + avroSchema.toString());
    }

    Schema.Type schemaType = field.schema().getType();
    if (schemaType == Schema.Type.MAP) {
      String stringProp = field.schema().getProp("avro.java.string");
      if (stringProp != null && stringProp.equals("String")) {
        return new String(columnKeyBytes);
      } else {
        return new Utf8(columnKeyBytes);
      }
    } else if (schemaType == Schema.Type.RECORD) {
      return new String(columnKeyBytes);
    } else {
      throw new SchemaValidationException("Unsupported type for keyAsColumn: "
          + schemaType);
    }
  }
View Full Code Here

  @Override
  public Object extractField(E entity, String fieldName) {
    Schema schema = avroSchema.getAvroSchema();
    Field field = schema.getField(fieldName);
    if (field == null) {
      throw new SchemaValidationException("No field named " + fieldName
          + " in schema " + schema);
    }
    Object fieldValue = entity.get(field.pos());
    if (fieldValue == null) {
      // if the field value is null, and the field is a primitive type,
View Full Code Here

  public Map<CharSequence, Object> extractKeyAsColumnValues(String fieldName,
      Object fieldValue) {
    Schema schema = avroSchema.getAvroSchema();
    Field field = schema.getField(fieldName);
    if (field == null) {
      throw new SchemaValidationException("No field named " + fieldName
          + " in schema " + schema);
    }
    if (field.schema().getType() == Schema.Type.MAP) {
      return new HashMap<CharSequence, Object>(
          (Map<CharSequence, Object>) fieldValue);
    } else if (field.schema().getType() == Schema.Type.RECORD) {
      Map<CharSequence, Object> keyAsColumnValues = new HashMap<CharSequence, Object>();
      IndexedRecord avroRecord = (IndexedRecord) fieldValue;
      for (Field avroRecordField : avroRecord.getSchema().getFields()) {
        keyAsColumnValues.put(avroRecordField.name(),
            avroRecord.get(avroRecordField.pos()));
      }
      return keyAsColumnValues;
    } else {
      throw new SchemaValidationException(
          "Only MAP or RECORD type valid for keyAsColumn fields. Found "
              + field.schema().getType());
    }
  }
View Full Code Here

  public Object buildKeyAsColumnField(String fieldName,
      Map<CharSequence, Object> keyAsColumnValues) {
    Schema schema = avroSchema.getAvroSchema();
    Field field = schema.getField(fieldName);
    if (field == null) {
      throw new SchemaValidationException("No field named " + fieldName
          + " in schema " + schema);
    }

    Schema.Type fieldType = field.schema().getType();
    if (fieldType == Schema.Type.MAP) {
      Map<CharSequence, Object> retMap = new HashMap<CharSequence, Object>();
      for (Entry<CharSequence, Object> entry : keyAsColumnValues.entrySet()) {
        retMap.put(entry.getKey(), entry.getValue());
      }
      return retMap;
    } else if (fieldType == Schema.Type.RECORD) {
      AvroRecordBuilder<E> builder = kacRecordBuilderFactories.get(fieldName)
          .getBuilder();
      for (Entry<CharSequence, Object> keyAsColumnEntry : keyAsColumnValues
          .entrySet()) {
        builder.put(keyAsColumnEntry.getKey().toString(),
            keyAsColumnEntry.getValue());
      }
      return builder.build();
    } else {
      throw new SchemaValidationException(
          "Only MAP or RECORD type valid for keyAsColumn fields. Found "
              + fieldType);
    }
  }
View Full Code Here

TOP

Related Classes of com.cloudera.cdk.data.SchemaValidationException

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by Oracle Inc. Contact: software@gmail.com.