Examples of Hive


Examples of org.apache.hadoop.hive.ql.metadata.Hive

  @Override
  public int execute(DriverContext driverContext) {

    // Create the db
    Hive db;
    try {
      db = Hive.get(conf);

      CreateDatabaseDesc createDatabaseDesc = work.getCreateDatabaseDesc();
      if (null != createDatabaseDesc) {
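The core of the pattern above is Hive.get(conf), which returns a cached, per-thread Hive handle backed by a metastore client. A minimal standalone sketch, assuming a hive-site.xml on the classpath; the class name and printed field are illustrative:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;

public class HiveHandleSketch {
  public static void main(String[] args) throws HiveException {
    HiveConf conf = new HiveConf(); // reads hive-site.xml from the classpath
    // Hive.get(conf) creates (or reuses) the thread-local Hive instance
    // wrapping a metastore client for this configuration.
    Hive db = Hive.get(conf);
    System.out.println("current database: " + db.getCurrentDatabase());
  }
}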

Examples of org.apache.hadoop.hive.ql.metadata.Hive

      throws HiveException, AuthorizationException {
    HashSet<ReadEntity> inputs = sem.getInputs();
    HashSet<WriteEntity> outputs = sem.getOutputs();
    SessionState ss = SessionState.get();
    HiveOperation op = ss.getHiveOperation();
    Hive db = sem.getDb();
    if (op != null) {
      if (op.equals(HiveOperation.CREATETABLE_AS_SELECT)
          || op.equals(HiveOperation.CREATETABLE)) {
        ss.getAuthorizer().authorize(
            db.getDatabase(db.getCurrentDatabase()), null,
            HiveOperation.CREATETABLE_AS_SELECT.getOutputRequiredPrivileges());
      }
      if (outputs != null && outputs.size() > 0) {
        for (WriteEntity write : outputs) {

          if (write.getType() == WriteEntity.Type.PARTITION) {
            Partition part = db.getPartition(write.getTable(), write
                .getPartition().getSpec(), false);
            if (part != null) {
              ss.getAuthorizer().authorize(write.getPartition(), null,
                      op.getOutputRequiredPrivileges());
              continue;
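The walk above authorizes CREATE TABLE [AS SELECT] at the database level, then checks each output entity against the privileges the operation requires. A hypothetical helper distilling the per-output loop (the class and method names are invented for illustration; as in the hook, a partition-level check is made when the output is a partition):

import java.util.Set;

import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;

public final class OutputAuthorizer {
  private OutputAuthorizer() {}

  public static void authorizeOutputs(HiveAuthorizationProvider authorizer,
      HiveOperation op, Set<WriteEntity> outputs)
      throws HiveException, AuthorizationException {
    for (WriteEntity write : outputs) {
      if (write.getType() == WriteEntity.Type.PARTITION) {
        // Authorize against the partition itself; the partition-level
        // grant is consulted before falling back to the table.
        authorizer.authorize(write.getPartition(), null,
            op.getOutputRequiredPrivileges());
      } else if (write.getType() == WriteEntity.Type.TABLE) {
        authorizer.authorize(write.getTable(), null,
            op.getOutputRequiredPrivileges());
      }
    }
  }
}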

Examples of org.apache.hadoop.hive.ql.metadata.Hive

                               String.valueOf(System.currentTimeMillis()),
                               "IMPLICIT");

      // Lock the database also
      try {
        Hive db = Hive.get(conf);
        lockObjects.add(new HiveLockObj(
                                        new HiveLockObject(db.getCurrentDatabase(), lockData),
                                        HiveLockMode.SHARED));
      } catch (HiveException e) {
        throw new SemanticException(e.getMessage());
      }
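A sketch of the same implicit database lock in isolation, matching the three-argument HiveLockObjectData constructor visible above (query id, lock time, lock mode); the wrapper class and queryId parameter are illustrative:

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.lockmgr.HiveLockMode;
import org.apache.hadoop.hive.ql.lockmgr.HiveLockObj;
import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject;
import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;

public class DbLockSketch {
  public static List<HiveLockObj> dbLock(HiveConf conf, String queryId)
      throws HiveException {
    HiveLockObjectData lockData = new HiveLockObjectData(queryId,
        String.valueOf(System.currentTimeMillis()), "IMPLICIT");
    Hive db = Hive.get(conf);
    List<HiveLockObj> lockObjects = new ArrayList<HiveLockObj>();
    // SHARED: other queries may hold shared locks on the database
    // concurrently; only an EXCLUSIVE request is blocked.
    lockObjects.add(new HiveLockObj(
        new HiveLockObject(db.getCurrentDatabase(), lockData),
        HiveLockMode.SHARED));
    return lockObjects;
  }
}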

Examples of org.apache.hadoop.hive.ql.metadata.Hive

  @Override
  public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
  throws SemanticException {

    Hive db;
    try {
      db = context.getHive();
    } catch (HiveException e) {
      throw new SemanticException("Couldn't get Hive DB instance in semantic analysis phase.", e);
    }

    // Analyze and create tbl properties object
    int numCh = ast.getChildCount();

    databaseName = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());

    for (int num = 1; num < numCh; num++) {
      ASTNode child = (ASTNode) ast.getChild(num);

      switch (child.getToken().getType()) {

      case HiveParser.TOK_QUERY: // CTAS
        throw new SemanticException("Operation not supported. Create db as Select is not a valid operation.");

      case HiveParser.TOK_IFNOTEXISTS:
        try {
          List<String> dbs = db.getDatabasesByPattern(databaseName);
          if (dbs != null && dbs.size() > 0) { // db exists
            return null;
          }
        } catch (HiveException e) {
          throw new SemanticException(e);
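Hooks like this one are plugged into compilation through the hive.semantic.analyzer.hook configuration property and run before and after semantic analysis. A minimal hypothetical hook showing where preAnalyze sits, assuming the AbstractSemanticAnalyzerHook base class from the same branch:

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.AbstractSemanticAnalyzerHook;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class LoggingSemanticHook extends AbstractSemanticAnalyzerHook {
  @Override
  public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
      throws SemanticException {
    // Runs before semantic analysis; throwing SemanticException here
    // rejects the statement, as the Howl hooks above do.
    System.err.println("pre-analyze, root token type: " + ast.getToken().getType());
    return ast; // the returned tree (possibly rewritten) is what gets analyzed
  }
}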

Examples of org.apache.hadoop.hive.ql.metadata.Hive

  @Override
  public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
  throws SemanticException {

    Hive db;
    try {
      db = context.getHive();
    } catch (HiveException e) {
      throw new SemanticException("Couldn't get Hive DB instance in semantic analysis phase.", e);
    }

    // Analyze and create tbl properties object
    int numCh = ast.getChildCount();

    String inputFormat = null, outputFormat = null;
    tableName = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());

    for (int num = 1; num < numCh; num++) {
      ASTNode child = (ASTNode) ast.getChild(num);

      switch (child.getToken().getType()) {

      case HiveParser.TOK_QUERY: // CTAS
        throw new SemanticException("Operation not supported. Create table as Select is not a valid operation.");

      case HiveParser.TOK_TABLEBUCKETS:
        throw new SemanticException("Operation not supported. Howl doesn't allow Clustered By in create table.");

      case HiveParser.TOK_TBLSEQUENCEFILE:
        throw new SemanticException("Operation not supported. Howl doesn't support Sequence File by default yet. " +
        "You may specify it through INPUT/OUTPUT storage drivers.");

      case HiveParser.TOK_TBLTEXTFILE:
        throw new SemanticException("Operation not supported. Howl doesn't support Text File by default yet. " +
        "You may specify it through INPUT/OUTPUT storage drivers.");

      case HiveParser.TOK_LIKETABLE:

        String likeTableName;
        if (child.getChildCount() > 0 && (likeTableName = BaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText())) != null) {

          throw new SemanticException("Operation not supported. CREATE TABLE LIKE is not supported.");
//          Map<String, String> tblProps;
//          try {
//            tblProps = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, likeTableName).getParameters();
//          } catch (HiveException he) {
//            throw new SemanticException(he);
//          }
//          if(!(tblProps.containsKey(InitializeInput.HOWL_ISD_CLASS) && tblProps.containsKey(InitializeInput.HOWL_OSD_CLASS))){
//            throw new SemanticException("Operation not supported. Table "+likeTableName+" should have been created through Howl. Seems like its not.");
//          }
//          return ast;
        }
        break;

      case HiveParser.TOK_IFNOTEXISTS:
        try {
          List<String> tables = db.getTablesByPattern(tableName);
          if (tables != null && tables.size() > 0) { // table exists
            return null;
          }
        } catch (HiveException e) {
          throw new SemanticException(e);
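The TOK_IFNOTEXISTS branch short-circuits by probing the metastore with getTablesByPattern, where a bare identifier (no wildcard characters) behaves as an exact match. The same probe as a standalone sketch; the class and method names are illustrative:

import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;

public class ExistenceCheckSketch {
  public static boolean tableExists(HiveConf conf, String tableName)
      throws HiveException {
    Hive db = Hive.get(conf);
    // Looks the pattern up in the current database.
    List<String> tables = db.getTablesByPattern(tableName);
    return tables != null && !tables.isEmpty();
  }
}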

Examples of org.apache.hadoop.hive.ql.metadata.Hive

    Map<String, String> howlProps = new HashMap<String, String>(2);
    howlProps.put(HowlConstants.HOWL_ISD_CLASS, inDriver);
    howlProps.put(HowlConstants.HOWL_OSD_CLASS, outDriver);

    try {
      Hive db = context.getHive();
      Table tbl = db.getTable(tableName);
      if(partSpec == null){
        // File format is for table; not for partition.
        tbl.getTTable().getParameters().putAll(howlProps);
        db.alterTable(tableName, tbl);
      }else{
        Partition part = db.getPartition(tbl,partSpec,false);
        Map<String,String> partParams = part.getParameters();
        if(partParams == null){
          partParams = new HashMap<String, String>();
        }
        partParams.putAll(howlProps);
        part.getTPartition().setParameters(partParams);
        db.alterPartition(tableName, part);
      }
    } catch (HiveException he) {
      throw new SemanticException(he);
    } catch (InvalidOperationException e) {
      throw new SemanticException(e);
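Both branches follow the same shape: mutate the Thrift-level parameter map in memory, then persist the change through alterTable or alterPartition. A hypothetical helper for the table-level branch on its own:

import java.util.Map;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;

public class TablePropsSketch {
  public static void setTableProps(HiveConf conf, String tableName,
      Map<String, String> props)
      throws HiveException, InvalidOperationException {
    Hive db = Hive.get(conf);
    Table tbl = db.getTable(tableName);
    tbl.getTTable().getParameters().putAll(props); // in-memory change only
    db.alterTable(tableName, tbl);                 // pushes it to the metastore
  }
}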

Examples of org.apache.hadoop.hive.ql.metadata.Hive

      return 0;
    }

    FsPermission perms = FsPermission.valueOf(permsStr);
    if(!tblName.isEmpty()){
      Hive db = null;
      try{
        db = Hive.get();
        Table tbl =  db.getTable(tblName);
        Path tblPath = tbl.getPath();

        FileSystem fs = tblPath.getFileSystem(conf);
        if(null != perms){
          fs.setPermission(tblPath, perms);
        }
        if(null != grp){
          fs.setOwner(tblPath, null, grp);
        }
        return 0;

      } catch (Exception e){
          ss.err.println(String.format("Failed to set permissions/groups on TABLE: <%s> %s",tblName,e.getMessage()));
          try { // We need to drop the table.
            if(null != db){ db.dropTable(tblName); }
          } catch (HiveException he) {
            ss.err.println(String.format("Failed to drop TABLE <%s> after failing to set permissions/groups on it. %s",tblName,e.getMessage()));
          }
          return 1;
      }
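A sketch of the chmod/chgrp step on its own. Note that FsPermission.valueOf parses a full unix symbolic string including the leading file-type character (e.g. "drwxr-xr-x"); the class and method names are illustrative:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.Table;

public class TablePermsSketch {
  public static void setTablePerms(Configuration conf, String tblName,
      String permsStr, String grp) throws Exception {
    Hive db = Hive.get();
    Table tbl = db.getTable(tblName);
    Path tblPath = tbl.getPath();
    FileSystem fs = tblPath.getFileSystem(conf);
    if (permsStr != null) {
      fs.setPermission(tblPath, FsPermission.valueOf(permsStr));
    }
    if (grp != null) {
      fs.setOwner(tblPath, null, grp); // null user: change the group only
    }
  }
}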

Examples of org.apache.hadoop.hive.ql.metadata.Hive

  }

  public int execute() {

    // Create the db
    Hive db;
    FileSystem fs;
    try {
      db = Hive.get(conf);
      fs = FileSystem.get(conf);
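Since Hive.get(conf) caches the handle per thread, long-running callers typically pair it with the static Hive.closeCurrent() to release the cached metastore client once the work is done. A sketch of that scaffolding; the printed fields are illustrative:

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;

public class ExecuteScaffoldSketch {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf();
    try {
      Hive db = Hive.get(conf);
      FileSystem fs = FileSystem.get(conf);
      System.out.println("db=" + db.getCurrentDatabase() + " fs=" + fs.getUri());
    } finally {
      Hive.closeCurrent(); // drops the thread-local metastore client
    }
  }
}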