Package org.apache.hadoop.hive.ql.parse

Examples of org.apache.hadoop.hive.ql.parse.SemanticException

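SemanticException is thrown from Hive's semantic analysis phase to reject a statement that parses but cannot be executed, either with a plain message for unsupported operations or as a wrapper around a lower-level HiveException so the original cause is preserved. Both forms appear throughout the snippets below; here is a minimal sketch of the pattern, using an illustrative analyzer class and table-lookup check that are not taken from the examples themselves:

    import org.apache.hadoop.hive.ql.metadata.Hive;
    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.parse.SemanticException;

    // Hypothetical helper showing the two common ways to raise SemanticException.
    public class ExampleAnalyzer {

      public void checkTableExists(Hive db, String tableName) throws SemanticException {
        try {
          // Message-only form: reject a statement the analyzer cannot accept.
          if (db.getTable(tableName, false) == null) {
            throw new SemanticException("Table does not exist: " + tableName);
          }
        } catch (HiveException e) {
          // Wrapping form: keep the underlying metastore failure as the cause.
          throw new SemanticException("Failed to look up table " + tableName, e);
        }
      }
    }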

    Hive db;
    try {
      db = context.getHive();
    } catch (HiveException e) {
      throw new SemanticException(
        "Couldn't get Hive DB instance in semantic analysis phase.",
        e);
    }

    // Analyze and create tbl properties object
    int numCh = ast.getChildCount();

    String inputFormat = null, outputFormat = null;
    tableName = BaseSemanticAnalyzer.getUnescapedName((ASTNode) ast
      .getChild(0));
    boolean likeTable = false;

    for (int num = 1; num < numCh; num++) {
      ASTNode child = (ASTNode) ast.getChild(num);

      switch (child.getToken().getType()) {

      case HiveParser.TOK_QUERY: // CTAS
        throw new SemanticException(
          "Operation not supported. Create table as " +
            "Select is not a valid operation.");

      case HiveParser.TOK_TABLEBUCKETS:
        break;

      case HiveParser.TOK_TBLSEQUENCEFILE:
        inputFormat = HCatConstants.SEQUENCEFILE_INPUT;
        outputFormat = HCatConstants.SEQUENCEFILE_OUTPUT;
        break;

      case HiveParser.TOK_TBLTEXTFILE:
        inputFormat = org.apache.hadoop.mapred.TextInputFormat.class.getName();
        outputFormat = org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat.class.getName();

        break;

      case HiveParser.TOK_LIKETABLE:
        likeTable = true;
        break;

      case HiveParser.TOK_IFNOTEXISTS:
        try {
          List<String> tables = db.getTablesByPattern(tableName);
          if (tables != null && tables.size() > 0) { // table exists
            return ast;
          }
        } catch (HiveException e) {
          throw new SemanticException(e);
        }
        break;

      case HiveParser.TOK_TABLEPARTCOLS:
        List<FieldSchema> partCols = BaseSemanticAnalyzer
          .getColumns((ASTNode) child.getChild(0), false);
        for (FieldSchema fs : partCols) {
          if (!fs.getType().equalsIgnoreCase("string")) {
            throw new SemanticException(
              "Operation not supported. HCatalog only " +
                "supports partition columns of type string. "
                + "For column: "
                + fs.getName()
                + " Found type: " + fs.getType());
          }
        }
        break;

      case HiveParser.TOK_STORAGEHANDLER:
        String storageHandler = BaseSemanticAnalyzer
          .unescapeSQLString(child.getChild(0).getText());
        if (org.apache.commons.lang.StringUtils
          .isNotEmpty(storageHandler)) {
          return ast;
        }

        break;

      case HiveParser.TOK_TABLEFILEFORMAT:
        if (child.getChildCount() < 2) {
          throw new SemanticException(
            "Incomplete specification of File Format. " +
              "You must provide InputFormat, OutputFormat.");
        }
        inputFormat = BaseSemanticAnalyzer.unescapeSQLString(child
          .getChild(0).getText());
        outputFormat = BaseSemanticAnalyzer.unescapeSQLString(child
          .getChild(1).getText());
        break;

      case HiveParser.TOK_TBLRCFILE:
        inputFormat = RCFileInputFormat.class.getName();
        outputFormat = RCFileOutputFormat.class.getName();
        break;

      }
    }

    if (!likeTable && (inputFormat == null || outputFormat == null)) {
      throw new SemanticException(
        "STORED AS specification is either incomplete or incorrect.");
    }


    return ast;


            desc.getInputFormat(),
            desc.getOutputFormat());
        // Authorization checks are performed by storageHandler.getAuthorizationProvider(),
        // if StorageDelegationAuthorizationProvider is used.
      } catch (IOException e) {
        throw new SemanticException(e);
      }
    }

    if (desc != null) {
      try {
        Table table = context.getHive().newTable(desc.getTableName());
        if (desc.getLocation() != null) {
          table.setDataLocation(new Path(desc.getLocation()).toUri());
        }
        if (desc.getStorageHandler() != null) {
          table.setProperty(
            org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE,
            desc.getStorageHandler());
        }
        for (Map.Entry<String, String> prop : tblProps.entrySet()) {
          table.setProperty(prop.getKey(), prop.getValue());
        }
        for (Map.Entry<String, String> prop : desc.getSerdeProps().entrySet()) {
          table.setSerdeParam(prop.getKey(), prop.getValue());
        }
        //TODO: set other Table properties as needed

        // authorize against the table operation so that location permissions, if any, can be checked

        if (HiveConf.getBoolVar(context.getConf(),
          HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
          authorize(table, Privilege.CREATE);
        }
      } catch (HiveException ex) {
        throw new SemanticException(ex);
      }
    }

    desc.setTblProps(tblProps);
    context.getConf().set(HCatConstants.HCAT_CREATE_TBL_NAME, tableName);

  private void setLockManager() throws SemanticException {
    boolean supportConcurrency = conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
    if (supportConcurrency) {
      String lockMgr = conf.getVar(HiveConf.ConfVars.HIVE_LOCK_MANAGER);
      if ((lockMgr == null) || (lockMgr.isEmpty())) {
        throw new SemanticException(ErrorMsg.LOCKMGR_NOT_SPECIFIED.getMsg());
      }

      try {
        hiveLockMgr = (HiveLockManager) ReflectionUtils.newInstance(conf.getClassByName(lockMgr),
            conf);
        hiveLockMgr.setContext(new HiveLockManagerCtx(conf));
      } catch (Exception e) {
        // Reset hiveLockMgr to null so this invalid manager is not carried
        // over to the next query's context.
        if (hiveLockMgr != null) {
          try {
            hiveLockMgr.close();
          } catch (LockException e1) {
            // nothing we can do here
          }
          hiveLockMgr = null;
        }
        throw new SemanticException(ErrorMsg.LOCKMGR_NOT_INITIALIZED.getMsg() + e.getMessage());
      }
    }
  }

                                                            + "/" + p.getTable().getTableName()
                                                            + "/" + partialName,
                                                              partialSpec), lockData), mode));
          partialName += "/";
        } catch (HiveException e) {
          throw new SemanticException(e.getMessage());
        }
      }

      locks.add(new HiveLockObj(new HiveLockObject(p.getTable(), lockData), mode));
      locks.add(new HiveLockObj(new HiveLockObject(p.getTable().getDbName(), lockData), mode));

          );

      List<HiveLock> hiveLocks = ctx.getHiveLockMgr().lock(lockObjects, false);

      if (hiveLocks == null) {
        throw new SemanticException(ErrorMsg.LOCK_CANNOT_BE_ACQUIRED.getMsg());
      } else {
        ctx.setHiveLocks(hiveLocks);
      }

      return (0);

   */
  public void removeChildAndAdoptItsChildren(
    Operator<? extends OperatorDesc> child) throws SemanticException {
    int childIndex = childOperators.indexOf(child);
    if (childIndex == -1) {
      throw new SemanticException(
          "Exception when trying to remove partition predicates: fail to find child from parent");
    }

    childOperators.remove(childIndex);
    if (child.getChildOperators() != null &&
        child.getChildOperators().size() > 0) {
      childOperators.addAll(childIndex, child.getChildOperators());
    }

    for (Operator<? extends OperatorDesc> gc : child.getChildOperators()) {
      List<Operator<? extends OperatorDesc>> parents = gc.getParentOperators();
      int index = parents.indexOf(child);
      if (index == -1) {
        throw new SemanticException(
          "Exception when trying to remove partition predicates: fail to find parent from child");
      }
      parents.set(index, this);
    }
  }

    case HiveParser.TOK_IMPORT:
      return ast;

    // In all other cases, throw an exception. It's a whitelist of allowed operations.
    default:
      throw new SemanticException("Operation not supported.");

    }
  }

      default:
        throw new HCatException(ErrorType.ERROR_INTERNAL_EXCEPTION, "Unexpected token: "+ast.getToken());
      }
    } catch(HCatException e){
      throw new SemanticException(e);
    } catch (MetaException e) {
      throw new SemanticException(e);
    } catch (HiveException e) {
      throw new SemanticException(e);
    }

    if(hook != null){
      hook.postAnalyze(context, rootTasks);
    }

    Hive db;
    try {
      db = context.getHive();
    } catch (HiveException e) {
      throw new SemanticException("Couldn't get Hive DB instance in semantic analysis phase.", e);
    }

    // Analyze and create db properties object
    int numCh = ast.getChildCount();

    databaseName = BaseSemanticAnalyzer.getUnescapedName((ASTNode)ast.getChild(0));

    for (int num = 1; num < numCh; num++) {
      ASTNode child = (ASTNode) ast.getChild(num);

      switch (child.getToken().getType()) {

      case HiveParser.TOK_QUERY: // CTAS
        throw new SemanticException("Operation not supported. Create db as Select is not a valid operation.");

      case HiveParser.TOK_IFNOTEXISTS:
        try {
          List<String> dbs = db.getDatabasesByPattern(databaseName);
          if (dbs != null && dbs.size() > 0) { // db exists
            return ast;
          }
        } catch (HiveException e) {
          throw new SemanticException(e);
        }
        break;
      }
    }

    Hive db;
    try {
      db = context.getHive();
    } catch (HiveException e) {
      throw new SemanticException("Couldn't get Hive DB instance in semantic analysis phase.", e);
    }

    // Analyze and create tbl properties object
    int numCh = ast.getChildCount();

    String inputFormat = null, outputFormat = null;
    tableName = BaseSemanticAnalyzer.getUnescapedName((ASTNode)ast.getChild(0));

    for (int num = 1; num < numCh; num++) {
      ASTNode child = (ASTNode) ast.getChild(num);

      switch (child.getToken().getType()) {

      case HiveParser.TOK_QUERY: // CTAS
        throw new SemanticException("Operation not supported. Create table as Select is not a valid operation.");

      case HiveParser.TOK_TABLEBUCKETS:
        throw new SemanticException("Operation not supported. HCatalog doesn't allow Clustered By in create table.");

      case HiveParser.TOK_TBLSEQUENCEFILE:
        throw new SemanticException("Operation not supported. HCatalog doesn't support Sequence File by default yet. " +
        "You may specify it through INPUT/OUTPUT storage drivers.");

      case HiveParser.TOK_TBLTEXTFILE:
        throw new SemanticException("Operation not supported. HCatalog doesn't support Text File by default yet. " +
        "You may specify it through INPUT/OUTPUT storage drivers.");

      case HiveParser.TOK_LIKETABLE:

        String likeTableName;
        if (child.getChildCount() > 0 && (likeTableName = BaseSemanticAnalyzer.getUnescapedName((ASTNode)ast.getChild(0))) != null) {

          throw new SemanticException("Operation not supported. CREATE TABLE LIKE is not supported.");
//          Map<String, String> tblProps;
//          try {
//            tblProps = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, likeTableName).getParameters();
//          } catch (HiveException he) {
//            throw new SemanticException(he);
//          }
//          if(!(tblProps.containsKey(InitializeInput.HOWL_ISD_CLASS) && tblProps.containsKey(InitializeInput.HOWL_OSD_CLASS))){
//            throw new SemanticException("Operation not supported. Table "+likeTableName+" should have been created through HCat. Seems like its not.");
//          }
//          return ast;
        }
        break;

      case HiveParser.TOK_IFNOTEXISTS:
        try {
          List<String> tables = db.getTablesByPattern(tableName);
          if (tables != null && tables.size() > 0) { // table exists
            return ast;
          }
        } catch (HiveException e) {
          throw new SemanticException(e);
        }
        break;

      case HiveParser.TOK_TABLEPARTCOLS:
        List<FieldSchema> partCols = BaseSemanticAnalyzer.getColumns((ASTNode) child.getChild(0), false);
        for(FieldSchema fs : partCols){
          if(!fs.getType().equalsIgnoreCase("string")){
            throw new SemanticException("Operation not supported. HCatalog only supports partition columns of type string. " +
                "For column: "+fs.getName()+" Found type: "+fs.getType());
          }
        }
        break;

      case HiveParser.TOK_TABLEFILEFORMAT:
        if(child.getChildCount() < 4) {
          throw new SemanticException("Incomplete specification of File Format. You must provide InputFormat, OutputFormat, InputDriver, OutputDriver.");
        }
        inputFormat      = BaseSemanticAnalyzer.unescapeSQLString(child.getChild(0).getText());
        outputFormat     = BaseSemanticAnalyzer.unescapeSQLString(child.getChild(1).getText());
        inStorageDriver  = BaseSemanticAnalyzer.unescapeSQLString(child.getChild(2).getText());
        outStorageDriver = BaseSemanticAnalyzer.unescapeSQLString(child.getChild(3).getText());
        break;

      case HiveParser.TOK_TBLRCFILE:
        inputFormat      = RCFileInputFormat.class.getName();
        outputFormat     = RCFileOutputFormat.class.getName();
        inStorageDriver  = RCFileInputDriver.class.getName();
        outStorageDriver = RCFileOutputDriver.class.getName();
        break;

      }
    }

    if(inputFormat == null || outputFormat == null || inStorageDriver == null || outStorageDriver == null){
      throw new SemanticException("STORED AS specification is either incomplete or incorrect.");
    }

    return ast;
  }
