Package: com.cloudera.cdk.data

Examples of com.cloudera.cdk.data.DatasetException


  @Override
  public Increment mapToIncrement(PartitionKey key, String fieldName,
      long amount) {
    FieldMapping fieldMapping = entitySchema.getFieldMapping(fieldName);
    if (fieldMapping == null) {
      throw new DatasetException("Unknown field in the schema: "
          + fieldName);
    }
    if (fieldMapping.getMappingType() != MappingType.COUNTER) {
      throw new DatasetException("Field is not a counter type: "
          + fieldName);
    }

    byte[] keyBytes = keySerDe.serialize(key);
    Increment increment = new Increment(keyBytes);
View Full Code Here


  @Override
  public long mapFromIncrementResult(Result result, String fieldName) {
    FieldMapping fieldMapping = entitySchema.getFieldMapping(fieldName);
    if (fieldMapping == null) {
      throw new DatasetException("Unknown field in the schema: "
          + fieldName);
    }
    if (fieldMapping.getMappingType() != MappingType.COUNTER) {
      throw new DatasetException("Field is not a counter type: "
          + fieldName);
    }
    return (Long) entitySerDe.deserialize(fieldMapping, result);
  }
View Full Code Here

        table.addFamily(new HColumnDescriptor("schema"));
        table.addFamily(new HColumnDescriptor("_s"));
        hbaseAdmin.createTable(table);
      }
    } catch (IOException e) {
      throw new DatasetException(e);
    }

    String entitySchemaString = descriptor.getSchema().toString(true);

    AvroKeyEntitySchemaParser parser = new AvroKeyEntitySchemaParser();
    AvroEntitySchema entitySchema = parser.parseEntitySchema(entitySchemaString);

    String tableName = getTableName(name);
    String entityName = getEntityName(name);

    schemaManager.refreshManagedSchemaCache(tableName, entityName);
    schemaManager.createSchema(tableName, entityName,
        entitySchemaString,
        "com.cloudera.cdk.data.hbase.avro.AvroKeyEntitySchemaParser",
        "com.cloudera.cdk.data.hbase.avro.AvroKeySerDe",
        "com.cloudera.cdk.data.hbase.avro.AvroEntitySerDe");

    try {
      if (!hbaseAdmin.tableExists(tableName)) {
        HTableDescriptor desc = new HTableDescriptor(tableName);
        desc.addFamily(new HColumnDescriptor(Constants.SYS_COL_FAMILY));
        desc.addFamily(new HColumnDescriptor(Constants.OBSERVABLE_COL_FAMILY));
        for (String columnFamily : entitySchema.getRequiredColumnFamilies()) {
          desc.addFamily(new HColumnDescriptor(columnFamily));
        }
        hbaseAdmin.createTable(desc);
      } else {
        Set<String> familiesToAdd = entitySchema.getRequiredColumnFamilies();
        familiesToAdd.add(new String(Constants.SYS_COL_FAMILY));
        familiesToAdd.add(new String(Constants.OBSERVABLE_COL_FAMILY));
        HTableDescriptor desc = hbaseAdmin.getTableDescriptor(tableName
            .getBytes());
        for (HColumnDescriptor columnDesc : desc.getColumnFamilies()) {
          String familyName = columnDesc.getNameAsString();
          if (familiesToAdd.contains(familyName)) {
            familiesToAdd.remove(familyName);
          }
        }
        if (familiesToAdd.size() > 0) {
          hbaseAdmin.disableTable(tableName);
          try {
            for (String family : familiesToAdd) {
              hbaseAdmin.addColumn(tableName, new HColumnDescriptor(family));
            }
          } finally {
            hbaseAdmin.enableTable(tableName);
          }
        }
      }
    } catch (IOException e) {
      throw new DatasetException(e);
    }
    return withPartitionStrategy(descriptor);
  }
View Full Code Here

    Symbol rootSymbol;
    try {
      ResolvingGrammarGenerator g = new ResolvingGrammarGenerator();
      rootSymbol = g.generate(writer, reader);
    } catch (IOException e) {
      throw new DatasetException("IOException while generating grammar.", e);
    }

    return !hasErrorSymbol(rootSymbol);
  }
View Full Code Here

    ds.dropPartition(partitionStrategy.partitionKey(1));
    Assert.assertFalse(
      fileSystem.isDirectory(new Path(testDirectory, "username=1")));

    DatasetException caught = null;

    try {
      ds.dropPartition(partitionStrategy.partitionKey(0));
    } catch (DatasetException e) {
      caught = e;
View Full Code Here

          schemaDirectory.substring(CLASSPATH_PREFIX.length()));
      if (dirURL != null && dirURL.getProtocol().equals("file")) {
        try {
          schemaStrings = getSchemaStringsFromDir(new File(dirURL.toURI()));
        } catch (URISyntaxException e) {
          throw new DatasetException(e);
        }
      } else if (dirURL != null && dirURL.getProtocol().equals("jar")) {
        String jarPath = dirURL.getPath().substring(5,
            dirURL.getPath().indexOf("!"));
        schemaStrings = getSchemaStringsFromJar(jarPath,
            schemaDirectory.substring(CLASSPATH_PREFIX.length()));
      } else {
        String msg = "Could not find classpath resource: " + schemaDirectory;
        LOG.error(msg);
        throw new DatasetException(msg);
      }
    } else {
      schemaStrings = getSchemaStringsFromDir(new File(schemaDirectory));
    }

    Map<String, String> tableKeySchemaMap = new HashMap<String, String>();
    Map<String, List<String>> tableEntitySchemaMap = new HashMap<String, List<String>>();
    for (String schemaString : schemaStrings) {
      String name = getEntityNameFromSchemaString(schemaString);
      List<String> tables = getTablesFromSchemaString(schemaString);
      if (name.endsWith("StorageKey")) {
        for (String table : tables) {
          if (tableKeySchemaMap.containsKey(table)) {
            String msg = "Multiple keys for table: " + table;
            LOG.error(msg);
            throw new SchemaValidationException(msg);
          }
          LOG.debug("Adding key to tableKeySchemaMap for table: " + table
              + ". " + schemaString);
          tableKeySchemaMap.put(table, schemaString);
        }
      } else {
        for (String table : tables) {
          if (tableEntitySchemaMap.containsKey(table)) {
            tableEntitySchemaMap.get(table).add(schemaString);
          } else {
            List<String> entityList = new ArrayList<String>();
            entityList.add(schemaString);
            tableEntitySchemaMap.put(table, entityList);
          }
        }
      }
    }

    for (Entry<String, List<String>> entry : tableEntitySchemaMap.entrySet()) {
      String table = entry.getKey();
      List<String> entitySchemas = entry.getValue();
      if (!tableKeySchemaMap.containsKey(table)) {
        String msg = "No StorageKey Schema For Table: " + table;
        LOG.error(msg);
        throw new DatasetException(msg);
      }
      if (entitySchemas.size() == 0) {
        String msg = "StorageKey, but no entity schemas for Table: " + table;
        LOG.error(msg);
        throw new SchemaValidationException(msg);
View Full Code Here

              hbaseAdmin.enableTable(tableName);
            }
          }
        }
      } catch (IOException e) {
        throw new DatasetException(e);
      }
    }
  }
View Full Code Here

    FileInputStream fis = null;
    try {
      fis = new FileInputStream(schemaFile);
      schemaString = AvroUtils.inputStreamToString(fis);
    } catch (IOException e) {
      throw new DatasetException(e);
    } finally {
      if (fis != null) {
        try {
          fis.close();
        } catch (IOException e) {
View Full Code Here

        + jarPath);
    JarFile jar;
    try {
      jar = new JarFile(URLDecoder.decode(jarPath, "UTF-8"));
    } catch (UnsupportedEncodingException e) {
      throw new DatasetException(e);
    } catch (IOException e) {
      throw new DatasetException(e);
    }
    Enumeration<JarEntry> entries = jar.entries();
    List<String> schemaStrings = new ArrayList<String>();
    while (entries.hasMoreElements()) {
      JarEntry jarEntry = entries.nextElement();
      if (jarEntry.getName().startsWith(directoryPath)
          && jarEntry.getName().endsWith(".avsc")) {
        LOG.info("Found schema: " + jarEntry.getName());
        InputStream inputStream;
        try {
          inputStream = jar.getInputStream(jarEntry);
        } catch (IOException e) {
          throw new DatasetException(e);
        }
        String schemaString = AvroUtils.inputStreamToString(inputStream);
        schemaStrings.add(schemaString);
      }
    }
View Full Code Here

            entitySchema);
      } catch (ClassNotFoundException e) {
        String msg = "StorageKey or entity class not found. Make sure the specific "
            + "record instances are on the classpath.";
        LOG.error(msg, e);
        throw new DatasetException(msg, e);
      } catch (SecurityException e) {
        String msg = "Cannot access key or entity class.";
        LOG.error(msg, e);
        throw new DatasetException(msg, e);
      } catch (NoSuchFieldException e) {
        String msg = "SCHEMA$ field not found in the entity class";
        LOG.error(msg, e);
        throw new DatasetException(msg, e);
      } catch (IllegalAccessException e) {
        String msg = "Not allowed to access SCHEMA$ field in the entity class";
        LOG.error(msg, e);
        throw new DatasetException(msg, e);
      }
    }

    // Initialize the entity mappers this object wraps. There will be one entity
    // mapper per version of the schema. When deserializing a row, we'll use the
View Full Code Here

TOP

Related Classes of com.cloudera.cdk.data.DatasetException

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by Oracle Inc. Contact: software@gmail.com.