Package org.apache.hadoop.hive.metastore.api

Examples of org.apache.hadoop.hive.metastore.api.StorageDescriptor
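The snippets below show typical ways a StorageDescriptor is populated through the Hive metastore client: setting columns, compression, bucketing, skew information, SerDe parameters, and storage formats when creating tables, views, and index tables.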

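Creating a table whose StorageDescriptor carries bucket columns, sort columns, and skewed-column metadata; SkewedInfo maps lists of skewed column values to locations: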

      client.createType(typ1);

      Table tbl = new Table();
      tbl.setDbName(dbName);
      tbl.setTableName(tblName);
      StorageDescriptor sd = new StorageDescriptor();
      tbl.setSd(sd);
      sd.setCols(typ1.getFields());
      sd.setCompressed(false);
      sd.setNumBuckets(1);
      sd.setParameters(new HashMap<String, String>());
      sd.getParameters().put("test_param_1", "Use this for comments etc");
      sd.setBucketCols(new ArrayList<String>(2));
      sd.getBucketCols().add("name");
      sd.setSerdeInfo(new SerDeInfo());
      sd.getSerdeInfo().setName(tbl.getTableName());
      sd.getSerdeInfo().setParameters(new HashMap<String, String>());
      sd.getSerdeInfo().getParameters()
          .put(serdeConstants.SERIALIZATION_FORMAT, "1");
      sd.setSortCols(new ArrayList<Order>());
      sd.setStoredAsSubDirectories(false);

      //skewed information
      SkewedInfo skewInfor = new SkewedInfo();
      skewInfor.setSkewedColNames(Arrays.asList("name"));
      List<String> skv = Arrays.asList("1");
      skewInfor.setSkewedColValues(Arrays.asList(skv));
      Map<List<String>, String> scvlm = new HashMap<List<String>, String>();
      scvlm.put(skv, "location1");
      skewInfor.setSkewedColValueLocationMaps(scvlm);
      sd.setSkewedInfo(skewInfor);

      tbl.setPartitionKeys(new ArrayList<FieldSchema>(2));
      tbl.getPartitionKeys().add(
          new FieldSchema("ds", serdeConstants.STRING_TYPE_NAME, ""));
      tbl.getPartitionKeys().add(
          new FieldSchema("hr", serdeConstants.STRING_TYPE_NAME, "")); // assumed second partition key; the snippet was truncated here

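Creating a base table and then a VIRTUAL_VIEW over it; the view still needs a StorageDescriptor with columns and a SerDeInfo even though it stores no data of its own: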

    cols.add(new FieldSchema("income", serdeConstants.INT_TYPE_NAME, ""));

    Table tbl = new Table();
    tbl.setDbName(dbName);
    tbl.setTableName(tblName);
    StorageDescriptor sd = new StorageDescriptor();
    tbl.setSd(sd);
    sd.setCols(cols);
    sd.setCompressed(false);
    sd.setParameters(new HashMap<String, String>());
    sd.setSerdeInfo(new SerDeInfo());
    sd.getSerdeInfo().setName(tbl.getTableName());
    sd.getSerdeInfo().setParameters(new HashMap<String, String>());
    sd.getSerdeInfo().getParameters()
        .put(serdeConstants.SERIALIZATION_FORMAT, "1");
    sd.setSortCols(new ArrayList<Order>());

    client.createTable(tbl);

    if (isThriftClient) {
      // the createTable() above does not update the location in the 'tbl'
      // object when the client is a thrift client and the code below relies
      // on the location being present in the 'tbl' object - so get the table
      // from the metastore
      tbl = client.getTable(dbName, tblName);
    }

    ArrayList<FieldSchema> viewCols = new ArrayList<FieldSchema>(1);
    viewCols.add(new FieldSchema("income", serdeConstants.INT_TYPE_NAME, ""));

    ArrayList<FieldSchema> viewPartitionCols = new ArrayList<FieldSchema>(1);
    viewPartitionCols.add(new FieldSchema("name", serdeConstants.STRING_TYPE_NAME, ""));

    Table view = new Table();
    view.setDbName(dbName);
    view.setTableName(viewName);
    view.setTableType(TableType.VIRTUAL_VIEW.name());
    view.setPartitionKeys(viewPartitionCols);
    view.setViewOriginalText("SELECT income, name FROM " + tblName);
    view.setViewExpandedText("SELECT `" + tblName + "`.`income`, `" + tblName +
        "`.`name` FROM `" + dbName + "`.`" + tblName + "`");
    StorageDescriptor viewSd = new StorageDescriptor();
    view.setSd(viewSd);
    viewSd.setCols(viewCols);
    viewSd.setCompressed(false);
    viewSd.setParameters(new HashMap<String, String>());
    viewSd.setSerdeInfo(new SerDeInfo());
    viewSd.getSerdeInfo().setParameters(new HashMap<String, String>());

    client.createTable(view);

    if (isThriftClient) {
      // as with the base table, createTable() does not populate the location
      // in the 'view' object for a thrift client - so re-read the view from
      // the metastore (assumed completion; the snippet was truncated here)
      view = client.getTable(dbName, viewName);
    }

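Creating a bucketed table with partition keys; the partition keys are set on the Table itself, not on the StorageDescriptor: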
      cols.add(new FieldSchema("income", serdeConstants.INT_TYPE_NAME, ""));

      Table tbl = new Table();
      tbl.setDbName(dbName);
      tbl.setTableName(tblName);
      StorageDescriptor sd = new StorageDescriptor();
      tbl.setSd(sd);
      sd.setCols(cols);
      sd.setCompressed(false);
      sd.setNumBuckets(1);
      sd.setParameters(new HashMap<String, String>());
      sd.getParameters().put("test_param_1", "Use this for comments etc");
      sd.setBucketCols(new ArrayList<String>(2));
      sd.getBucketCols().add("name");
      sd.setSerdeInfo(new SerDeInfo());
      sd.getSerdeInfo().setName(tbl.getTableName());
      sd.getSerdeInfo().setParameters(new HashMap<String, String>());
      sd.getSerdeInfo().getParameters()
          .put(serdeConstants.SERIALIZATION_FORMAT, "1");
      sd.setSortCols(new ArrayList<Order>());

      tbl.setPartitionKeys(new ArrayList<FieldSchema>(2));
      tbl.getPartitionKeys().add(
          new FieldSchema("ds", serdeConstants.STRING_TYPE_NAME, ""));
      tbl.getPartitionKeys().add(
          new FieldSchema("hr", serdeConstants.STRING_TYPE_NAME, "")); // assumed second partition key; the snippet was truncated here

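Configuring a StorageDescriptor for RCFile storage, with the input/output formats and a LazySimpleSerDe serialization library set explicitly: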
    fields.add(new FieldSchema("colname", serdeConstants.STRING_TYPE_NAME, ""));

    Table tbl = new Table();
    tbl.setDbName(dbName);
    tbl.setTableName(tblName);
    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(fields);
    tbl.setSd(sd);

    //sd.setLocation("hdfs://tmp");
    sd.setInputFormat(RCFileInputFormat.class.getName());
    sd.setOutputFormat(RCFileOutputFormat.class.getName());
    sd.setParameters(new HashMap<String, String>());
    sd.getParameters().put("test_param_1", "Use this for comments etc");
    //sd.setBucketCols(new ArrayList<String>(2));
    //sd.getBucketCols().add("name");
    sd.setSerdeInfo(new SerDeInfo());
    sd.getSerdeInfo().setName(tbl.getTableName());
    sd.getSerdeInfo().setParameters(new HashMap<String, String>());
    sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1");
    sd.getSerdeInfo().setSerializationLib(
        org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
    tbl.setPartitionKeys(fields);

    Map<String, String> tableParams = new HashMap<String, String>();
    tableParams.put("hcat.testarg", "testArgValue");
    tbl.setParameters(tableParams); // assumed next step; the snippet was truncated here

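Creating a table and verifying the round trip: the StorageDescriptor fields read back from the metastore (columns, compression, bucket count, location) are asserted against what was written: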
      client.createType(typ1);

      Table tbl = new Table();
      tbl.setDbName(dbName);
      tbl.setTableName(tblName);
      StorageDescriptor sd = new StorageDescriptor();
      tbl.setSd(sd);
      sd.setCols(typ1.getFields());
      sd.setCompressed(false);
      sd.setNumBuckets(1);
      sd.setParameters(new HashMap<String, String>());
      sd.getParameters().put("test_param_1", "Use this for comments etc");
      sd.setBucketCols(new ArrayList<String>(2));
      sd.getBucketCols().add("name");
      sd.setSerdeInfo(new SerDeInfo());
      sd.getSerdeInfo().setName(tbl.getTableName());
      sd.getSerdeInfo().setParameters(new HashMap<String, String>());
      sd.getSerdeInfo().getParameters().put(
          org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, "1");
      sd.getSerdeInfo().setSerializationLib(
          org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
      tbl.setPartitionKeys(new ArrayList<FieldSchema>());

      client.createTable(tbl);

      if (isThriftClient) {
        // the createTable() above does not update the location in the 'tbl'
        // object when the client is a thrift client and the code below relies
        // on the location being present in the 'tbl' object - so get the table
        // from the metastore
        tbl = client.getTable(dbName, tblName);
      }

      Table tbl2 = client.getTable(dbName, tblName);
      assertNotNull(tbl2);
      assertEquals(tbl2.getDbName(), dbName);
      assertEquals(tbl2.getTableName(), tblName);
      assertEquals(tbl2.getSd().getCols().size(), typ1.getFields().size());
      assertEquals(tbl2.getSd().isCompressed(), false);
      assertEquals(tbl2.getSd().getNumBuckets(), 1);
      assertEquals(tbl2.getSd().getLocation(), tbl.getSd().getLocation());
      assertNotNull(tbl2.getSd().getSerdeInfo());
      sd.getSerdeInfo().setParameters(new HashMap<String, String>());
      sd.getSerdeInfo().getParameters().put(
          org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, "1");

      tbl2.setTableName(tblName2);
      tbl2.setParameters(new HashMap<String, String>());
      tbl2.getParameters().put("EXTERNAL", "TRUE");
      client.createTable(tbl2); // assumed completion; the snippet was truncated here

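A negative test: the table uses an invalid name and a column name containing a dot ("in.come"), so createTable() is expected to fail with InvalidObjectException: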
      invCols.add(new FieldSchema("in.come", serdeConstants.INT_TYPE_NAME, ""));

      Table tbl = new Table();
      tbl.setDbName(dbName);
      tbl.setTableName(invTblName);
      StorageDescriptor sd = new StorageDescriptor();
      tbl.setSd(sd);
      sd.setCols(invCols);
      sd.setCompressed(false);
      sd.setNumBuckets(1);
      sd.setParameters(new HashMap<String, String>());
      sd.getParameters().put("test_param_1", "Use this for comments etc");
      sd.setBucketCols(new ArrayList<String>(2));
      sd.getBucketCols().add("name");
      sd.setSerdeInfo(new SerDeInfo());
      sd.getSerdeInfo().setName(tbl.getTableName());
      sd.getSerdeInfo().setParameters(new HashMap<String, String>());
      sd.getSerdeInfo().getParameters().put(
          org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, "1");
      boolean failed = false;
      try {
        client.createTable(tbl);
      } catch (InvalidObjectException ex) {
        // expected: the invalid name is rejected by the metastore
        failed = true; // assumed completion; the snippet was truncated here
      }


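An index handler deep-copies the index's StorageDescriptor and appends the implicit _bucketname and _offsets columns before attaching the copy to the index table: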
  @Override
  public void analyzeIndexDefinition(Table baseTable, Index index,
      Table indexTable) throws HiveException {
    StorageDescriptor storageDesc = index.getSd();
    if (this.usesIndexTable() && indexTable != null) {
      StorageDescriptor indexTableSd = storageDesc.deepCopy();
      List<FieldSchema> indexTblCols = indexTableSd.getCols();
      FieldSchema bucketFileName = new FieldSchema("_bucketname", "string", "");
      indexTblCols.add(bucketFileName);
      FieldSchema offSets = new FieldSchema("_offsets", "array<bigint>", "");
      indexTblCols.add(offSets);
      indexTable.setSd(indexTableSd);
    }
  }

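Building a minimal table for a scenario where the metastore call is expected to raise a MetaException, which the try/catch captures for later assertions: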
    fields.add(new FieldSchema("a", serdeConstants.STRING_TYPE_NAME, ""));

    Table ttbl = new Table();
    ttbl.setDbName(dbName);
    ttbl.setTableName(tblName);
    StorageDescriptor sd = new StorageDescriptor();
    ttbl.setSd(sd);
    sd.setCols(fields);
    sd.setParameters(new HashMap<String, String>());
    sd.getParameters().put("test_param_1", "Use this for comments etc");
    sd.setSerdeInfo(new SerDeInfo());
    sd.getSerdeInfo().setName(ttbl.getTableName());
    sd.getSerdeInfo().setParameters(new HashMap<String, String>());
    sd.getSerdeInfo().getParameters().put(
        org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT, "1");
    sd.getSerdeInfo().setSerializationLib(
        org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
    ttbl.setPartitionKeys(new ArrayList<FieldSchema>());

    MetaException me = null;
    try {
      client.createTable(ttbl); // assumed call under test; the snippet was truncated here
    } catch (MetaException e) {
      me = e;
    }

