Package org.apache.hadoop.hive.ql.metadata

Examples of org.apache.hadoop.hive.ql.metadata.Table


   * @return Returns 0 when execution succeeds and a value above 0 if it fails.
   * @throws HiveException Throws this exception if an unexpected error occurs.
   */
  private int alterTable(Hive db, alterTableDesc alterTbl) throws HiveException {
    // alter the table
    Table tbl = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, alterTbl.getOldName());
    if (alterTbl.getOp() == alterTableDesc.alterTableTypes.RENAME)
      tbl.getTTable().setTableName(alterTbl.getNewName());
    else if (alterTbl.getOp() == alterTableDesc.alterTableTypes.ADDCOLS) {
      List<FieldSchema> newCols = alterTbl.getNewCols();
      List<FieldSchema> oldCols = tbl.getCols();
      if (tbl.getSerializationLib().equals("org.apache.hadoop.hive.serde.thrift.columnsetSerDe")) {
        console.printInfo("Replacing columns for columnsetSerDe and changing to LazySimpleSerDe");
        tbl.setSerializationLib(LazySimpleSerDe.class.getName());
        tbl.getTTable().getSd().setCols(newCols);
      } else {
        // make sure the new columns do not already exist
        Iterator<FieldSchema> iterNewCols = newCols.iterator();
        while (iterNewCols.hasNext()) {
          FieldSchema newCol = iterNewCols.next();
          String newColName = newCol.getName();
          Iterator<FieldSchema> iterOldCols = oldCols.iterator();
          while (iterOldCols.hasNext()) {
            String oldColName = iterOldCols.next().getName();
            if (oldColName.equalsIgnoreCase(newColName)) {
              console.printError("Column '" + newColName + "' exists");
              return 1;
            }
          }
          oldCols.add(newCol);
        }
        tbl.getTTable().getSd().setCols(oldCols);
      }
    } else if (alterTbl.getOp() == alterTableDesc.alterTableTypes.REPLACECOLS) {
      // change SerDe to LazySimpleSerDe if it is columnsetSerDe
      if (tbl.getSerializationLib().equals("org.apache.hadoop.hive.serde.thrift.columnsetSerDe")) {
        console.printInfo("Replacing columns for columnsetSerDe and changing to LazySimpleSerDe");
        tbl.setSerializationLib(LazySimpleSerDe.class.getName());
      } else if (!tbl.getSerializationLib().equals(MetadataTypedColumnsetSerDe.class.getName())
          && !tbl.getSerializationLib().equals(LazySimpleSerDe.class.getName())
          && !tbl.getSerializationLib().equals(DynamicSerDe.class.getName())) {
        console.printError("Replace columns is not supported for this table. SerDe may be incompatible.");
        return 1;
      }
      tbl.getTTable().getSd().setCols(alterTbl.getNewCols());
    } else if (alterTbl.getOp() == alterTableDesc.alterTableTypes.ADDPROPS) {
      tbl.getTTable().getParameters().putAll(alterTbl.getProps());
    } else if (alterTbl.getOp() == alterTableDesc.alterTableTypes.ADDSERDEPROPS) {
      tbl.getTTable().getSd().getSerdeInfo().getParameters().putAll(
          alterTbl.getProps());
    } else if (alterTbl.getOp() == alterTableDesc.alterTableTypes.ADDSERDE) {
      tbl.setSerializationLib(alterTbl.getSerdeName());
      if ((alterTbl.getProps() != null) && (alterTbl.getProps().size() > 0))
        tbl.getTTable().getSd().getSerdeInfo().getParameters().putAll(
            alterTbl.getProps());
      // the SerDe was changed, so reinitialize it and reset the columns accordingly
      tbl.reinitSerDe();
      tbl.setFields(Hive.getFieldsFromDeserializer(tbl.getName(), tbl.getDeserializer()));
    } else {
      console.printError("Unsupported Alter commnad");
      return 1;
    }

    // set last modified by properties
    try {
      tbl.setProperty("last_modified_by", conf.getUser());
    } catch (IOException e) {
      console.printError("Unable to get current user: " + e.getMessage(), StringUtils.stringifyException(e));
      return 1;
    }
    tbl.setProperty("last_modified_time", Long.toString(System
        .currentTimeMillis() / 1000));

    try {
      tbl.checkValidity();
    } catch (HiveException e) {
      console.printError("Invalid table columns : " + e.getMessage(), StringUtils.stringifyException(e));
      return 1;
    }

View Full Code Here
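The ADDCOLS branch above rejects any new column whose name already exists, comparing names case-insensitively. Distilled into a standalone sketch (the class and method names here are illustrative, not Hive's; FieldSchema is the metastore's Thrift-generated column descriptor):

import java.util.List;
import org.apache.hadoop.hive.metastore.api.FieldSchema;

public class AddColumnsSketch {
  /** Returns the name of the first clashing column, or null if none clash. */
  static String findDuplicate(List<FieldSchema> oldCols, List<FieldSchema> newCols) {
    for (FieldSchema newCol : newCols) {
      for (FieldSchema oldCol : oldCols) {
        // Hive compares column names case-insensitively: "KEY" clashes with "key".
        if (oldCol.getName().equalsIgnoreCase(newCol.getName())) {
          return newCol.getName();
        }
      }
    }
    return null; // no clash: the new columns can be appended to the schema
  }
}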


    if (dropTbl.getPartSpecs() == null) {
      // drop the table
      db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, dropTbl.getTableName());
    } else {
      // drop partitions in the list
      Table tbl = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, dropTbl.getTableName());
      List<Partition> parts = new ArrayList<Partition>();
      for (Map<String, String> partSpec : dropTbl.getPartSpecs()) {
        Partition part = db.getPartition(tbl, partSpec, false);
        if (part == null) {
          console.printInfo("Partition " + partSpec + " does not exist.");
View Full Code Here
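The drop-partition loop is cut off mid-branch; a plausible completion, under the assumption that existing partitions are collected for a subsequent drop:

      for (Map<String, String> partSpec : dropTbl.getPartSpecs()) {
        Partition part = db.getPartition(tbl, partSpec, false); // false: don't create it
        if (part == null) {
          console.printInfo("Partition " + partSpec + " does not exist.");
        } else {
          parts.add(part); // assumption: collect only partitions that exist
        }
      }
      // each collected Partition would then be dropped; that call is not shown here.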

   * @return Returns 0 when execution succeeds and a value above 0 if it fails.
   * @throws HiveException Throws this exception if an unexpected error occurs.
   */
  private int createTable(Hive db, createTableDesc crtTbl) throws HiveException {
    // create the table
    Table tbl = new Table(crtTbl.getTableName());
    StorageDescriptor tblStorDesc = tbl.getTTable().getSd();
    if (crtTbl.getBucketCols() != null)
      tblStorDesc.setBucketCols(crtTbl.getBucketCols());
    if (crtTbl.getSortCols() != null)
      tbl.setSortCols(crtTbl.getSortCols());
    if (crtTbl.getPartCols() != null)
      tbl.setPartCols(crtTbl.getPartCols());
    if (crtTbl.getNumBuckets() != -1)
      tblStorDesc.setNumBuckets(crtTbl.getNumBuckets());

    if (crtTbl.getSerName() != null) {
      tbl.setSerializationLib(crtTbl.getSerName());
      if (crtTbl.getMapProp() != null) {
        Iterator<Map.Entry<String, String>> iter = crtTbl.getMapProp()
            .entrySet().iterator();
        while (iter.hasNext()) {
          Map.Entry<String, String> m = (Map.Entry<String, String>) iter.next();
          tbl.setSerdeParam(m.getKey(), m.getValue());
        }
      }
    } else {
      if (crtTbl.getFieldDelim() != null) {
        tbl.setSerdeParam(Constants.FIELD_DELIM, crtTbl.getFieldDelim());
        tbl.setSerdeParam(Constants.SERIALIZATION_FORMAT, crtTbl.getFieldDelim());
      }

      if (crtTbl.getCollItemDelim() != null)
        tbl.setSerdeParam(Constants.COLLECTION_DELIM, crtTbl.getCollItemDelim());
      if (crtTbl.getMapKeyDelim() != null)
        tbl.setSerdeParam(Constants.MAPKEY_DELIM, crtTbl.getMapKeyDelim());
      if (crtTbl.getLineDelim() != null)
        tbl.setSerdeParam(Constants.LINE_DELIM, crtTbl.getLineDelim());
    }

    /**
     * We use LazySimpleSerDe by default.
     *
     * If the user didn't specify a SerDe, and any of the columns are not simple types,
     * we will have to use DynamicSerDe instead.
     */
    if (crtTbl.getSerName() == null) {
      LOG.info("Default to LazySimpleSerDe for table " + crtTbl.getTableName() );
      tbl.setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
    }

    if (crtTbl.getComment() != null)
      tbl.setProperty("comment", crtTbl.getComment());
    if (crtTbl.getLocation() != null)
      tblStorDesc.setLocation(crtTbl.getLocation());

    tbl.setInputFormatClass(crtTbl.getInputFormat());
    tbl.setOutputFormatClass(crtTbl.getOutputFormat());

    if (crtTbl.isExternal())
      tbl.setProperty("EXTERNAL", "TRUE");

    // If the sorted columns are a superset of the bucketed columns, store this
    // fact; it can later be used to optimize some group-by queries. The order
    // does not matter, as long as every bucketed column appears among the
    // first 'n' sort columns, where 'n' is the number of bucketed columns
    // (hence the bucketCols.size() bound on the inner loop below).
    if ((tbl.getBucketCols() != null) && (tbl.getSortCols() != null)) {
      List<String> bucketCols = tbl.getBucketCols();
      List<Order> sortCols = tbl.getSortCols();

      if ((sortCols.size() > 0) && (sortCols.size() >= bucketCols.size())) {
        boolean found = true;

        Iterator<String> iterBucketCols = bucketCols.iterator();
        while (iterBucketCols.hasNext()) {
          String bucketCol = iterBucketCols.next();
          boolean colFound = false;
          for (int i = 0; i < bucketCols.size(); i++) {
            if (bucketCol.equals(sortCols.get(i).getCol())) {
              colFound = true;
              break;
            }
          }
          if (!colFound) {
            found = false;
            break;
          }
        }
        if (found)
          tbl.setProperty("SORTBUCKETCOLSPREFIX", "TRUE");
      }
    }

    try {
      tbl.setOwner(conf.getUser());
    } catch (IOException e) {
      console.printError("Unable to get current user: " + e.getMessage(), StringUtils.stringifyException(e));
      return 1;
    }
    // set create time
    tbl.getTTable().setCreateTime((int) (System.currentTimeMillis() / 1000));

    if (crtTbl.getCols() != null) {
      tbl.setFields(crtTbl.getCols());
    }

    // create the table
    db.createTable(tbl, crtTbl.getIfNotExists());
    return 0;
View Full Code Here
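The bucket/sort-column test near the end of createTable is easy to misread: the inner loop indexes sortCols but bounds the index by bucketCols.size(), which is exactly the "first n sort columns" rule from the comment. The same predicate as a standalone sketch (the method name is ours; Order is the metastore's sort-column descriptor with the getCol() accessor used above):

  /**
   * True when every bucketed column appears among the first n sort columns,
   * where n is the number of bucketed columns; order within that prefix is
   * irrelevant.
   */
  static boolean sortColsCoverBucketCols(List<String> bucketCols, List<Order> sortCols) {
    if (sortCols.isEmpty() || sortCols.size() < bucketCols.size()) {
      return false;
    }
    for (String bucketCol : bucketCols) {
      boolean found = false;
      // deliberately bounded by bucketCols.size(): only the leading prefix counts
      for (int i = 0; i < bucketCols.size(); i++) {
        if (bucketCol.equals(sortCols.get(i).getCol())) {
          found = true;
          break;
        }
      }
      if (!found) {
        return false;
      }
    }
    return true;
  }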

    LinkedList<String> part_cols = new LinkedList<String>();
    part_cols.add("ds");
    part_cols.add("hr");
    db.createTable("srcpart", cols, part_cols, TextInputFormat.class, IgnoreKeyTextOutputFormat.class);
    srcTables.add("srcpart");
    Table srcpart = db.getTable("srcpart");
   
    Path fpath;
    Path newfpath;
    HashMap<String, String> part_spec = new HashMap<String, String>();
    String loadCmd;
    for (String ds: new String[]{"2008-04-08", "2008-04-09"}) {
      for (String hr: new String[]{"11", "12"}) {
        part_spec.clear();
        part_spec.put("ds", ds);
        part_spec.put("hr", hr);
        // System.out.println("Loading partition with spec: " + part_spec);
        db.createPartition(srcpart, part_spec);
        fpath = new Path(testFiles, "kv1.txt");
        newfpath = new Path(tmppath, "kv1.txt");
        fs.copyFromLocalFile(false, true, fpath, newfpath);
        fpath = newfpath;
        //db.loadPartition(fpath, srcpart.getName(), part_spec, true);
        runLoadCmd("LOAD DATA INPATH '" +  newfpath.toString() +
                   "' INTO TABLE srcpart PARTITION (ds='" + ds + "',hr='" + hr +"')");
      }
    }
    ArrayList<String> bucketCols = new ArrayList<String>();
    bucketCols.add("key");
    db.createTable("srcbucket", cols, null, TextInputFormat.class, IgnoreKeyTextOutputFormat.class, 2, bucketCols);
    srcTables.add("srcbucket");
    for (String fname: new String [] {"kv1.txt", "kv2.txt"}) {
      fpath = new Path(testFiles, fname);
      newfpath = new Path(tmppath, fname);
      fs.copyFromLocalFile(false, true, fpath, newfpath);
      runLoadCmd("LOAD DATA INPATH '" +  newfpath.toString() + "' INTO TABLE srcbucket");
    }
   
    for (String tname: new String [] {"src", "src1"}) {
      db.createTable(tname, cols, null, TextInputFormat.class, IgnoreKeyTextOutputFormat.class);
      srcTables.add(tname);
    }
    db.createTable("src_sequencefile", cols, null, SequenceFileInputFormat.class, SequenceFileOutputFormat.class);
    srcTables.add("src_sequencefile");
   
    Table srcThrift = new Table("src_thrift");
    srcThrift.setInputFormatClass(SequenceFileInputFormat.class.getName());
    srcThrift.setOutputFormatClass(SequenceFileOutputFormat.class.getName());
    srcThrift.setSerializationLib(ThriftDeserializer.class.getName());
    srcThrift.setSerdeParam(Constants.SERIALIZATION_CLASS, Complex.class.getName());
    srcThrift.setSerdeParam(Constants.SERIALIZATION_FORMAT, TBinaryProtocol.class.getName());
    db.createTable(srcThrift);
    srcTables.add("src_thrift");
   
    LinkedList<String> json_cols = new LinkedList<String>();
    json_cols.add("json");
View Full Code Here

    db.createTable("dest1", cols, null, TextInputFormat.class, IgnoreKeyTextOutputFormat.class);
    db.createTable("dest2", cols, null, TextInputFormat.class, IgnoreKeyTextOutputFormat.class);
  
    db.createTable("dest3", cols, part_cols, TextInputFormat.class, IgnoreKeyTextOutputFormat.class);
    Table dest3 = db.getTable("dest3");

    HashMap<String, String> part_spec = new HashMap<String, String>();
    part_spec.put("ds", "2008-04-08");
    part_spec.put("hr", "12");
    db.createPartition(dest3, part_spec);
View Full Code Here

   * @throws HiveException
   *           Throws this exception if an unexpected error occurs.
   */
  private int createTableLike(Hive db, CreateTableLikeDesc crtTbl) throws HiveException {
    // Get the existing table
    Table oldtbl = db.getTable(crtTbl.getLikeTableName());
    Table tbl;
    if (oldtbl.getTableType() == TableType.VIRTUAL_VIEW) {
      String targetTableName = crtTbl.getTableName();
      tbl = db.newTable(targetTableName);

      if (crtTbl.getTblProps() != null) {
        tbl.getTTable().getParameters().putAll(crtTbl.getTblProps());
      }

      tbl.setTableType(TableType.MANAGED_TABLE);

      if (crtTbl.isExternal()) {
        tbl.setProperty("EXTERNAL", "TRUE");
        tbl.setTableType(TableType.EXTERNAL_TABLE);
      }

      tbl.setFields(oldtbl.getCols());
      tbl.setPartCols(oldtbl.getPartCols());

      if (crtTbl.getDefaultSerName() == null) {
        LOG.info("Default to LazySimpleSerDe for table " + crtTbl.getTableName());
        tbl.setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
      } else {
        // let's validate that the serde exists
        validateSerDe(crtTbl.getDefaultSerName());
        tbl.setSerializationLib(crtTbl.getDefaultSerName());
      }

      if (crtTbl.getDefaultSerdeProps() != null) {
        Iterator<Entry<String, String>> iter = crtTbl.getDefaultSerdeProps().entrySet()
          .iterator();
        while (iter.hasNext()) {
          Entry<String, String> m = iter.next();
          tbl.setSerdeParam(m.getKey(), m.getValue());
        }
      }

      tbl.setInputFormatClass(crtTbl.getDefaultInputFormat());
      tbl.setOutputFormatClass(crtTbl.getDefaultOutputFormat());

      tbl.getTTable().getSd().setInputFormat(
          tbl.getInputFormatClass().getName());
      tbl.getTTable().getSd().setOutputFormat(
          tbl.getOutputFormatClass().getName());
    } else {
      tbl = oldtbl;

      // find out database name and table name of target table
      String targetTableName = crtTbl.getTableName();
      Table newTable = db.newTable(targetTableName);

      tbl.setDbName(newTable.getDbName());
      tbl.setTableName(newTable.getTableName());

      if (crtTbl.getLocation() != null) {
        tbl.setDataLocation(new Path(crtTbl.getLocation()).toUri());
      } else {
        tbl.unsetDataLocation();
View Full Code Here
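Stripped of the property plumbing, createTableLike makes one structural decision: a view source yields a brand-new managed table that copies only the schema, while a table source reuses the fetched Table object and re-points its identity. A minimal sketch of that decision, using only the accessors already shown (the helper name is ours):

  static Table makeLikeTarget(Hive db, Table oldtbl, String targetTableName)
      throws HiveException {
    if (oldtbl.getTableType() == TableType.VIRTUAL_VIEW) {
      Table tbl = db.newTable(targetTableName); // fresh table: copy schema only
      tbl.setFields(oldtbl.getCols());
      tbl.setPartCols(oldtbl.getPartCols());
      return tbl;
    }
    Table tbl = oldtbl;                          // reuse the source object...
    Table fresh = db.newTable(targetTableName);  // ...and re-point its identity
    tbl.setDbName(fresh.getDbName());
    tbl.setTableName(fresh.getTableName());
    return tbl;
  }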

   * @return Returns 0 when execution succeeds and a value above 0 if it fails.
   * @throws HiveException
   *           Throws this exception if an unexpected error occurs.
   */
  private int createView(Hive db, CreateViewDesc crtView) throws HiveException {
    Table oldview = db.getTable(crtView.getViewName(), false);
    if (crtView.getOrReplace() && oldview != null) {
      // replace existing view
      // remove the existing partition columns from the field schema
      oldview.setViewOriginalText(crtView.getViewOriginalText());
      oldview.setViewExpandedText(crtView.getViewExpandedText());
      oldview.setFields(crtView.getSchema());
      if (crtView.getComment() != null) {
        oldview.setProperty("comment", crtView.getComment());
      }
      if (crtView.getTblProps() != null) {
        oldview.getTTable().getParameters().putAll(crtView.getTblProps());
      }
      oldview.setPartCols(crtView.getPartCols());
      oldview.checkValidity();
      try {
        db.alterTable(crtView.getViewName(), oldview);
      } catch (InvalidOperationException e) {
        throw new HiveException(e);
      }
      work.getOutputs().add(new WriteEntity(oldview));
    } else {
      // create new view
      Table tbl = db.newTable(crtView.getViewName());
      tbl.setTableType(TableType.VIRTUAL_VIEW);
      tbl.setSerializationLib(null);
      tbl.clearSerDeInfo();
      tbl.setViewOriginalText(crtView.getViewOriginalText());
      tbl.setViewExpandedText(crtView.getViewExpandedText());
      tbl.setFields(crtView.getSchema());
      if (crtView.getComment() != null) {
        tbl.setProperty("comment", crtView.getComment());
      }
      if (crtView.getTblProps() != null) {
        tbl.getTTable().getParameters().putAll(crtView.getTblProps());
      }

      if (crtView.getPartCols() != null) {
        tbl.setPartCols(crtView.getPartCols());
      }

      int rc = setGenericTableAttributes(tbl);
      if (rc != 0) {
        return rc;
View Full Code Here
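A note on the two-argument getTable calls: createView passes false and tolerates a null result, while the truncate snippet below passes true, so the boolean plausibly controls whether a missing table is treated as an error. With that assumption, the OR REPLACE flow reduces to the following hedged skeleton (viewName and orReplace stand in for crtView.getViewName() and crtView.getOrReplace(); this is our summary, not Hive's code):

    Table oldview = db.getTable(viewName, false); // assumption: false = null when absent
    if (orReplace && oldview != null) {
      // replace: mutate the fetched view in place, run checkValidity(), and
      // persist the change through db.alterTable(viewName, oldview)
    } else {
      // create: build db.newTable(viewName), mark it TableType.VIRTUAL_VIEW,
      // clear the SerDe state, set the original/expanded view text and schema,
      // then create it (the snippet is cut off before the create call)
    }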

    }

    String tableName = truncateTableDesc.getTableName();
    Map<String, String> partSpec = truncateTableDesc.getPartSpec();

    Table table = db.getTable(tableName, true);

    try {
      // this is not transactional
      for (Path location : getLocations(db, table, partSpec)) {
        FileSystem fs = location.getFileSystem(conf);
View Full Code Here
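The truncate snippet ends right after obtaining the FileSystem. A hypothetical continuation, assuming truncation clears each location's contents with the standard org.apache.hadoop.fs API (our guess at the elided body, not the snippet's actual code):

      for (Path location : getLocations(db, table, partSpec)) {
        FileSystem fs = location.getFileSystem(conf);
        for (FileStatus status : fs.listStatus(location)) { // children of the location
          fs.delete(status.getPath(), true); // recursive delete; the directory itself stays
        }
      }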

  }

  private int exchangeTablePartition(Hive db,
      AlterTableExchangePartition exchangePartition) throws HiveException {
    Map<String, String> partitionSpecs = exchangePartition.getPartitionSpecs();
    Table destTable = exchangePartition.getDestinationTable();
    Table sourceTable = exchangePartition.getSourceTable();
    db.exchangeTablePartitions(partitionSpecs, sourceTable.getDbName(),
        sourceTable.getTableName(), destTable.getDbName(),
        destTable.getTableName());
    return 0;
  }
View Full Code Here

      } else {
        String obj = hiveObjectDesc.getObject();
        boolean notFound = true;
        String dbName = null;
        String tableName = null;
        Table tableObj = null;
        Database dbObj = null;

        if (hiveObjectDesc.getTable()) {
          String[] dbTab = obj.split("\\.");
          if (dbTab.length == 2) {
View Full Code Here
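That last snippet is cut off inside the dbTab.length check. A sketch of how such "db.table" parsing typically completes, writing into the dbName and tableName locals declared above (the fall-back to the default database is our assumption):

        String[] dbTab = obj.split("\\.");
        if (dbTab.length == 2) {
          // fully qualified "db.table"
          dbName = dbTab[0];
          tableName = dbTab[1];
        } else {
          // bare table name; assumption: fall back to the default database
          dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
          tableName = dbTab[0];
        }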
