Package org.apache.hadoop.hive.ql.metadata

Examples of org.apache.hadoop.hive.ql.metadata.Table


  /**
   * @throws HiveException
   *           Throws this exception if an unexpected error occurs.
   */
  private int showPartitions(Hive db, ShowPartitionsDesc showParts) throws HiveException {
    // get the partitions for the table and populate the output
    String tabName = showParts.getTabName();
    Table tbl = db.getTable(tabName);
    List<String> parts = null;

    if (!tbl.isPartitioned()) {
      throw new HiveException(ErrorMsg.TABLE_NOT_PARTITIONED, tabName);
    }
    if (showParts.getPartSpec() != null) {
      parts = db.getPartitionNames(tbl.getDbName(),
          tbl.getTableName(), showParts.getPartSpec(), (short) -1);
    } else {
      parts = db.getPartitionNames(tbl.getDbName(), tbl.getTableName(), (short) -1);
    }

    // write the results in the file
    DataOutputStream outStream = null;
    try {
      // ... (rest of method elided)
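For context, here is a minimal, self-contained sketch of driving the same partition-listing calls from outside DDLTask. The configuration bootstrap and the table name are assumptions for illustration; the Hive and Table method signatures match those used in the snippet above.

import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;

public class ListPartitionsExample {
  public static void main(String[] args) throws HiveException {
    HiveConf conf = new HiveConf();            // assumes hive-site.xml is on the classpath
    Hive db = Hive.get(conf);                  // thread-local metastore client wrapper
    Table tbl = db.getTable("my_partitioned_table");  // hypothetical table name
    if (!tbl.isPartitioned()) {
      throw new HiveException("not a partitioned table");
    }
    // (short) -1 means "no limit on the number of names returned", as above
    List<String> parts = db.getPartitionNames(tbl.getDbName(), tbl.getTableName(), (short) -1);
    for (String part : parts) {
      System.out.println(part);
    }
  }
}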


    final String ROW_FORMAT = "row_format";
    final String TBL_LOCATION = "tbl_location";
    final String TBL_PROPERTIES = "tbl_properties";

    String tableName = showCreateTbl.getTableName();
    Table tbl = db.getTable(tableName, false);
    DataOutput outStream = null;
    List<String> duplicateProps = new ArrayList<String>();
    try {
      Path resFile = new Path(showCreateTbl.getResFile());
      FileSystem fs = resFile.getFileSystem(conf);
      outStream = fs.create(resFile);

      if (tbl.isView()) {
        String createTab_stmt = "CREATE VIEW " + tableName + " AS " + tbl.getViewExpandedText();
        outStream.writeBytes(createTab_stmt);
        ((FSDataOutputStream) outStream).close();
        outStream = null;
        return 0;
      }

      ST createTab_stmt = new ST("CREATE <" + EXTERNAL + "> TABLE " +
          tableName + "(\n" +
          "<" + LIST_COLUMNS + ">)\n" +
          "<" + TBL_COMMENT + ">\n" +
          "<" + LIST_PARTITIONS + ">\n" +
          "<" + SORT_BUCKET + ">\n" +
          "<" + ROW_FORMAT + ">\n" +
          "LOCATION\n" +
          "<" + TBL_LOCATION + ">\n" +
          "TBLPROPERTIES (\n" +
          "<" + TBL_PROPERTIES + ">)\n");

      // For cases where the table is external
      String tbl_external = "";
      if (tbl.getTableType() == TableType.EXTERNAL_TABLE) {
        duplicateProps.add("EXTERNAL");
        tbl_external = "EXTERNAL";
      }

      // Columns
      String tbl_columns = "";
      List<FieldSchema> cols = tbl.getCols();
      List<String> columns = new ArrayList<String>();
      for (FieldSchema col : cols) {
        String columnDesc = "  " + col.getName() + " " + col.getType();
        if (col.getComment() != null) {
          columnDesc = columnDesc + " COMMENT '" + escapeHiveCommand(col.getComment()) + "'";
        }
        columns.add(columnDesc);
      }
      tbl_columns = StringUtils.join(columns, ", \n");

      // Table comment
      String tbl_comment = "";
      String tabComment = tbl.getProperty("comment");
      if (tabComment != null) {
        duplicateProps.add("comment");
        tbl_comment = "COMMENT '" + escapeHiveCommand(tabComment) + "'";
      }

      // Partitions
      String tbl_partitions = "";
      List<FieldSchema> partKeys = tbl.getPartitionKeys();
      if (partKeys.size() > 0) {
        tbl_partitions += "PARTITIONED BY ( \n";
        List<String> partCols = new ArrayList<String>();
        for (FieldSchema partKey : partKeys) {
          String partColDesc = "  " + partKey.getName() + " " + partKey.getType();
          if (partKey.getComment() != null) {
            partColDesc = partColDesc + " COMMENT '" +
                escapeHiveCommand(partKey.getComment()) + "'";
          }
          partCols.add(partColDesc);
        }
        tbl_partitions += StringUtils.join(partCols, ", \n");
        tbl_partitions += ")";
      }

      // Clusters (Buckets)
      String tbl_sort_bucket = "";
      List<String> buckCols = tbl.getBucketCols();
      if (buckCols.size() > 0) {
        duplicateProps.add("SORTBUCKETCOLSPREFIX");
        tbl_sort_bucket += "CLUSTERED BY ( \n  ";
        tbl_sort_bucket += StringUtils.join(buckCols, ", \n  ");
        tbl_sort_bucket += ") \n";
        List<Order> sortCols = tbl.getSortCols();
        if (sortCols.size() > 0) {
          tbl_sort_bucket += "SORTED BY ( \n";
          // Order
          List<String> sortKeys = new ArrayList<String>();
          for (Order sortCol : sortCols) {
            String sortKeyDesc = "  " + sortCol.getCol() + " ";
            if (sortCol.getOrder() == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_ASC) {
              sortKeyDesc = sortKeyDesc + "ASC";
            } else if (sortCol.getOrder() == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_DESC) {
              sortKeyDesc = sortKeyDesc + "DESC";
            }
            sortKeys.add(sortKeyDesc);
          }
          tbl_sort_bucket += StringUtils.join(sortKeys, ", \n");
          tbl_sort_bucket += ") \n";
        }
        tbl_sort_bucket += "INTO " + tbl.getNumBuckets() + " BUCKETS";
      }

      // Row format (SerDe)
      String tbl_row_format = "";
      StorageDescriptor sd = tbl.getTTable().getSd();
      SerDeInfo serdeInfo = sd.getSerdeInfo();
      tbl_row_format += "ROW FORMAT";
      if (tbl.getStorageHandler() == null) {
        if (serdeInfo.getParametersSize() > 1) {
          // A "serialization.format" property is always present, even for a
          // delimited row format, so more than one parameter implies explicit
          // delimiters. Only the following four delimiters are emitted here.
          tbl_row_format += " DELIMITED \n";
          Map<String, String> delims = serdeInfo.getParameters();
          // Warning:
          // if all four delimiters appear in a CREATE TABLE statement,
          // they must be emitted in exactly this order,
          // or the statement fails with a ParseException.
          if (delims.containsKey(serdeConstants.FIELD_DELIM)) {
            tbl_row_format += "  FIELDS TERMINATED BY '" +
                escapeHiveCommand(StringEscapeUtils.escapeJava(delims.get(
                serdeConstants.FIELD_DELIM))) + "' \n";
          }
          if (delims.containsKey(serdeConstants.COLLECTION_DELIM)) {
            tbl_row_format += "  COLLECTION ITEMS TERMINATED BY '" +
                escapeHiveCommand(StringEscapeUtils.escapeJava(delims.get(
                serdeConstants.COLLECTION_DELIM))) + "' \n";
          }
          if (delims.containsKey(serdeConstants.MAPKEY_DELIM)) {
            tbl_row_format += "  MAP KEYS TERMINATED BY '" +
                escapeHiveCommand(StringEscapeUtils.escapeJava(delims.get(
                serdeConstants.MAPKEY_DELIM))) + "' \n";
          }
          if (delims.containsKey(serdeConstants.LINE_DELIM)) {
            tbl_row_format += "  LINES TERMINATED BY '" +
                escapeHiveCommand(StringEscapeUtils.escapeJava(delims.get(
                serdeConstants.LINE_DELIM))) + "' \n";
          }
        } else {
          tbl_row_format += " SERDE \n  '" +
              escapeHiveCommand(serdeInfo.getSerializationLib()) + "' \n";
        }
        tbl_row_format += "STORED AS INPUTFORMAT \n  '" +
            escapeHiveCommand(sd.getInputFormat()) + "' \n";
        tbl_row_format += "OUTPUTFORMAT \n  '" +
            escapeHiveCommand(sd.getOutputFormat()) + "'";
      } else {
        duplicateProps.add(org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE);
        tbl_row_format += " SERDE \n  '" +
            escapeHiveCommand(serdeInfo.getSerializationLib()) + "' \n";
        tbl_row_format += "STORED BY \n  '" + escapeHiveCommand(tbl.getParameters().get(
            org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE)) + "' \n";
        // SerDe Properties
        if (serdeInfo.getParametersSize() > 0) {
          tbl_row_format += "WITH SERDEPROPERTIES ( \n";
          List<String> serdeCols = new ArrayList<String>();
          for (Map.Entry<String, String> entry : serdeInfo.getParameters().entrySet()) {
            serdeCols.add("  '" + entry.getKey() + "'='"
                + escapeHiveCommand(StringEscapeUtils.escapeJava(entry.getValue())) + "'");
          }
          tbl_row_format += StringUtils.join(serdeCols, ", \n");
          tbl_row_format += ")";
        }
      }
      String tbl_location = "  '" + escapeHiveCommand(sd.getLocation()) + "'";

      // Table properties
      String tbl_properties = "";
      Map<String, String> properties = tbl.getParameters();
      if (properties.size() > 0) {
        List<String> realProps = new ArrayList<String>();
        for (String key : properties.keySet()) {
          if (properties.get(key) != null && !duplicateProps.contains(key)) {
            realProps.add("  '" + key + "'='" +
                escapeHiveCommand(StringEscapeUtils.escapeJava(properties.get(key))) + "'");
          }
        }
        tbl_properties += StringUtils.join(realProps, ", \n");
      }
      // ... (rest of method elided)
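The warning comment in the snippet above deserves emphasis: Hive's grammar accepts the four DELIMITED sub-clauses only in the order FIELDS, COLLECTION ITEMS, MAP KEYS, LINES. A hedged illustration of a statement that respects that order (the table name and delimiter choices are hypothetical):

    // Illustrative only; any other ordering of the TERMINATED BY clauses
    // fails with a ParseException at compile time.
    String ddl =
        "CREATE TABLE demo (id INT, tags ARRAY<STRING>, attrs MAP<STRING,STRING>)\n"
        + "ROW FORMAT DELIMITED\n"
        + "  FIELDS TERMINATED BY ','\n"
        + "  COLLECTION ITEMS TERMINATED BY '|'\n"
        + "  MAP KEYS TERMINATED BY ':'\n"
        + "  LINES TERMINATED BY '\\n'";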

  /**
   * @throws HiveException
   *           Throws this exception if an unexpected error occurs.
   */
  private int showIndexes(Hive db, ShowIndexesDesc showIndexes) throws HiveException {
    // get the indexes for the table and populate the output
    String tableName = showIndexes.getTableName();
    Table tbl = db.getTable(tableName);
    List<Index> indexes = db.getIndexes(tbl.getDbName(), tbl.getTableName(), (short) -1);

    // write the results in the file
    DataOutput outStream = null;
    try {
      Path resFile = new Path(showIndexes.getResFile());
      // ... (rest of method elided)

  public int showColumns(Hive db, ShowColumnsDesc showCols)
                         throws HiveException {

    String dbName = showCols.getDbName();
    String tableName = showCols.getTableName();
    Table table = null;
    if (dbName == null) {
      table = db.getTable(tableName);
    } else {
      table = db.getTable(dbName, tableName);
    }

    // write the results in the file
    DataOutput outStream = null;
    try {
      Path resFile = new Path(showCols.getResFile());
      FileSystem fs = resFile.getFileSystem(conf);
      outStream = fs.create(resFile);

      List<FieldSchema> cols = table.getCols();
      cols.addAll(table.getPartCols());
      outStream.writeBytes(
          MetaDataFormatUtils.getAllColumnsInformation(cols, false));
      ((FSDataOutputStream) outStream).close();
      outStream = null;
    } catch (IOException e) {
      // ... (rest of method elided)
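One detail worth calling out in showColumns: SHOW COLUMNS reports partition columns after the regular columns, which is why getPartCols() is appended. A small sketch of that combination; copying into a fresh list first is a defensive choice in case getCols() ever returns an internal list by reference:

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.metadata.Table;

class ColumnListing {
  // Regular columns first, then partition columns, as SHOW COLUMNS prints them.
  static List<FieldSchema> allColumns(Table table) {
    List<FieldSchema> cols = new ArrayList<FieldSchema>(table.getCols());
    cols.addAll(table.getPartCols());
    return cols;
  }
}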

      throw new HiveException("lock Table LockManager not specified");
    }

    HiveLockMode mode = HiveLockMode.valueOf(lockTbl.getMode());
    String tabName = lockTbl.getTableName();
    Table tbl = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tabName);
    if (tbl == null) {
      throw new HiveException("Table " + tabName + " does not exist");
    }

    Map<String, String> partSpec = lockTbl.getPartSpec();
    // ... (rest of method elided)

    return 0;
  }

  private HiveLockObject getHiveObject(String tabName,
                                       Map<String, String> partSpec) throws HiveException {
    Table tbl = db.getTable(tabName);
    if (tbl == null) {
      throw new HiveException("Table " + tabName + " does not exist");
    }

    HiveLockObject obj = null;
    // ... (rest of method elided)

    // get the tables for the desired pattern - populate the output stream
    List<Table> tbls = new ArrayList<Table>();
    Map<String, String> part = showTblStatus.getPartSpec();
    Partition par = null;
    if (part != null) {
      Table tbl = db.getTable(showTblStatus.getDbName(), showTblStatus.getPattern());
      par = db.getPartition(tbl, part, false);
      if (par == null) {
        throw new HiveException("Partition " + part + " for table "
            + showTblStatus.getPattern() + " does not exist.");
      }
      tbls.add(tbl);
    } else {
      LOG.info("pattern: " + showTblStatus.getPattern());
      List<String> tblStr = db.getTablesForDb(showTblStatus.getDbName(),
          showTblStatus.getPattern());
      SortedSet<String> sortedTbls = new TreeSet<String>(tblStr);
      for (String tblName : sortedTbls) {
        // create a row per table name
        Table tbl = db.getTable(showTblStatus.getDbName(), tblName);
        tbls.add(tbl);
      }
      LOG.info("results: " + tblStr.size());
    }
    // ... (rest of method elided)
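A condensed sketch of the pattern-matching branch above, assuming a Hive client obtained as in the first example. The database name and pattern are hypothetical, and the pattern syntax is whatever getTablesForDb accepts in this Hive version:

import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;

import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;

class TableStatusListing {
  static void printMatches(Hive db) throws HiveException {
    List<String> names = db.getTablesForDb("default", "src*");  // hypothetical pattern
    SortedSet<String> sorted = new TreeSet<String>(names);      // stable, sorted output
    for (String name : sorted) {
      Table tbl = db.getTable("default", name);
      System.out.println(tbl.getTableName() + "\t" + tbl.getTableType());
    }
  }
}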

  /**
   * @throws HiveException
   *           Throws this exception if an unexpected error occurs.
   */
  private int showTableProperties(Hive db, ShowTblPropertiesDesc showTblPrpt) throws HiveException {
    String tableName = showTblPrpt.getTableName();

    // show table properties - populate the output stream
    Table tbl = db.getTable(tableName, false);
    DataOutput outStream = null;
    try {
      Path resFile = new Path(showTblPrpt.getResFile());
      FileSystem fs = resFile.getFileSystem(conf);
      outStream = fs.create(resFile);

      if (tbl == null) {
        String errMsg = "Table " + tableName + " does not exist";
        outStream.write(errMsg.getBytes("UTF-8"));
        ((FSDataOutputStream) outStream).close();
        outStream = null;
        return 0;
      }

      LOG.info("DDLTask: show properties for " + tbl.getTableName());

      String propertyName = showTblPrpt.getPropertyName();
      if (propertyName != null) {
        String propertyValue = tbl.getProperty(propertyName);
        if (propertyValue == null) {
          String errMsg = "Table " + tableName + " does not have property: " + propertyName;
          outStream.write(errMsg.getBytes("UTF-8"));
        } else {
          outStream.writeBytes(propertyValue);
        }
      } else {
        Map<String, String> properties = tbl.getParameters();
        for (Map.Entry<String, String> entry : properties.entrySet()) {
          writeKeyValuePair(outStream, entry.getKey(), entry.getValue());
        }
      }

      LOG.info("DDLTask: written data for showing properties of " + tbl.getTableName());
      ((FSDataOutputStream) outStream).close();
      outStream = null;

    } catch (FileNotFoundException e) {
      LOG.info("show table properties: " + stringifyException(e));
      // ... (rest of method elided)
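The two branches above map directly onto the two forms of the statement this method serves: SHOW TBLPROPERTIES tbl ('key') for a single property and SHOW TBLPROPERTIES tbl for all of them. A minimal sketch of the corresponding Table accessors (the property handling mirrors the snippet; the output format is illustrative):

import java.util.Map;

import org.apache.hadoop.hive.ql.metadata.Table;

class TablePropertiesLookup {
  // Mirrors the two branches of showTableProperties above.
  static void dump(Table tbl, String propertyName) {
    if (propertyName != null) {
      String value = tbl.getProperty(propertyName);   // null if the property is unset
      System.out.println(value == null ? "(not set)" : value);
    } else {
      Map<String, String> props = tbl.getParameters(); // every TBLPROPERTIES entry
      for (Map.Entry<String, String> e : props.entrySet()) {
        System.out.println(e.getKey() + "=" + e.getValue());
      }
    }
  }
}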

  private int describeTable(Hive db, DescTableDesc descTbl) throws HiveException {
    String colPath = descTbl.getColumnPath();
    String tableName = descTbl.getTableName();

    // describe the table - populate the output stream
    Table tbl = db.getTable(tableName, false);
    Partition part = null;
    DataOutputStream outStream = null;
    try {
      Path resFile = new Path(descTbl.getResFile());
      if (tbl == null) {
        FileSystem fs = resFile.getFileSystem(conf);
        outStream = fs.create(resFile);
        outStream.close();
        outStream = null;
        throw new HiveException(ErrorMsg.INVALID_TABLE, tableName);
      }
      if (descTbl.getPartSpec() != null) {
        part = db.getPartition(tbl, descTbl.getPartSpec(), false);
        if (part == null) {
          FileSystem fs = resFile.getFileSystem(conf);
          outStream = fs.create(resFile);
          outStream.close();
          outStream = null;
          throw new HiveException(ErrorMsg.INVALID_PARTITION,
                  StringUtils.join(descTbl.getPartSpec().keySet(), ','), tableName);
        }
        tbl = part.getTable();
      }
    } catch (IOException e) {
      throw new HiveException(e, ErrorMsg.GENERIC_ERROR, tableName);
    } finally {
      IOUtils.closeStream(outStream);
    }

    try {
      LOG.info("DDLTask: got data for " + tbl.getTableName());
      Path resFile = new Path(descTbl.getResFile());
      FileSystem fs = resFile.getFileSystem(conf);
      outStream = fs.create(resFile);

      List<FieldSchema> cols = null;
      if (colPath.equals(tableName)) {
        cols = (part == null || tbl.getTableType() == TableType.VIRTUAL_VIEW) ?
            tbl.getCols() : part.getCols();

        // colPath equals tableName in this branch, so partition columns are
        // always appended for an unformatted DESCRIBE.
        if (!descTbl.isFormatted()) {
          cols.addAll(tbl.getPartCols());
        }
      } else {
        cols = Hive.getFieldsFromDeserializer(colPath, tbl.getDeserializer());
      }

      formatter.describeTable(outStream, colPath, tableName, tbl, part, cols,
                              descTbl.isFormatted(), descTbl.isExt(), descTbl.isPretty());

      LOG.info("DDLTask: written data for " + tbl.getTableName());
      outStream.close();
      outStream = null;

    } catch (IOException e) {
      throw new HiveException(e, ErrorMsg.GENERIC_ERROR, tableName);
      // ... (rest of method elided)
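The colPath handling above is the crux of describeTable: when colPath equals the table name, DESCRIBE covers the whole table; otherwise colPath names a column (possibly a nested field), and the schema is resolved through the table's deserializer. A minimal sketch of that dispatch, using the same static helper the snippet calls:

import java.util.List;

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;

class DescribeDispatch {
  static List<FieldSchema> columnsFor(Table tbl, String tableName, String colPath)
      throws HiveException {
    if (colPath.equals(tableName)) {
      return tbl.getCols();   // DESCRIBE tbl: the full column list
    }
    // DESCRIBE tbl.col (or a nested field): ask the deserializer for the schema
    return Hive.getFieldsFromDeserializer(colPath, tbl.getDeserializer());
  }
}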

  /**
   * @throws HiveException
   *           Throws this exception if an unexpected error occurs.
   */
  private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
    // alter the table
    Table tbl = db.getTable(alterTbl.getOldName());

    Partition part = null;
    List<Partition> allPartitions = null;
    if (alterTbl.getPartSpec() != null) {
      if (alterTbl.getOp() != AlterTableDesc.AlterTableTypes.ALTERPROTECTMODE) {
        part = db.getPartition(tbl, alterTbl.getPartSpec(), false);
        if (part == null) {
          throw new HiveException(ErrorMsg.INVALID_PARTITION,
                  StringUtils.join(alterTbl.getPartSpec().keySet(), ',') + " for table " + alterTbl.getOldName());
        }
      } else {
        allPartitions = db.getPartitions(tbl, alterTbl.getPartSpec());
      }
    }

    Table oldTbl = tbl.copy();

    if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.RENAME) {
      tbl.setTableName(alterTbl.getNewName());
    } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDCOLS) {
      List<FieldSchema> newCols = alterTbl.getNewCols();
      // ... (rest of method elided)
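For completeness, a sketch of exercising the RENAME branch above through the same client API. The alterTable(String, Table) signature and its checked exceptions are as used by this generation of Hive (verify against your version), and the table names are hypothetical:

import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;

class RenameTableExample {
  static void rename(Hive db) throws HiveException, InvalidOperationException {
    Table tbl = db.getTable("old_name");   // hypothetical source table
    tbl.setTableName("new_name");          // mutate the in-memory copy
    db.alterTable("old_name", tbl);        // persist the change via the metastore
  }
}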
