Package org.apache.hadoop.hive.ql.metadata

Examples of org.apache.hadoop.hive.ql.metadata.HiveException
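HiveException is the checked exception that Hive's ql layer throws when metadata lookups, DDL operations, or the underlying filesystem fail. The excerpts below appear to be drawn from Hive's DDL handling code and show its recurring patterns: throwing with a descriptive message, wrapping a lower-level cause such as an IOException, and constructing from a canonical ErrorMsg code. A minimal sketch of the first two patterns; the HiveExceptionSketch class, its checkPath helper, and the helper's arguments are illustrative, not part of Hive:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.metadata.HiveException;

public class HiveExceptionSketch {
  // Illustrative helper: verify that a path exists, reporting any problem
  // as a HiveException.
  static void checkPath(Configuration conf, Path p) throws HiveException {
    try {
      FileSystem fs = p.getFileSystem(conf);
      if (!fs.exists(p)) {
        // Pattern 1: a new exception with a descriptive message.
        throw new HiveException(p + " cannot be found");
      }
    } catch (IOException e) {
      // Pattern 2: wrap the underlying cause so callers see one exception type.
      throw new HiveException(e);
    }
  }
}

The first excerpt (truncated at both ends) appears to come from SHOW GRANT handling: resolve the target object, then print database-, column-, partition-, or table-level grants for the requested principal.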


          dbName = hiveObjectDesc.getObject();
          dbObj = db.getDatabase(dbName);
          notFound = (dbObj == null);
        }
        if (notFound) {
          throw new HiveException(obj + " cannot be found");
        }

        String partName = null;
        List<String> partValues = null;
        if (hiveObjectDesc.getPartSpec() != null) {
          partName = Warehouse
              .makePartName(hiveObjectDesc.getPartSpec(), false);
          partValues = Warehouse.getPartValuesFromPartName(partName);
        }

        if (!hiveObjectDesc.getTable()) {
          // show database level privileges
          List<HiveObjectPrivilege> dbs = db.showPrivilegeGrant(HiveObjectType.DATABASE, principalName,
              principalDesc.getType(), dbName, null, null, null);
          if (dbs != null && dbs.size() > 0) {
            boolean first = true;
            sortPrivileges(dbs);
            for (HiveObjectPrivilege dbPriv : dbs) {
              if (!first) {
                outStream.write(terminator);
              } else {
                first = false;
              }

              writeGrantInfo(outStream, principalDesc.getType(), principalName,
                  dbName, null, null, null, dbPriv.getGrantInfo());

            }
          }

        } else {
          if (showGrantDesc.getColumns() != null) {
            // show column level privileges
            for (String columnName : showGrantDesc.getColumns()) {
              List<HiveObjectPrivilege> columnPrivs = db.showPrivilegeGrant(
                  HiveObjectType.COLUMN, principalName,
                  principalDesc.getType(), dbName, tableName, partValues,
                  columnName);
              if (columnPrivs != null && columnPrivs.size() > 0) {
                boolean first = true;
                sortPrivileges(columnPrivs);
                for (HiveObjectPrivilege col : columnPrivs) {
                  if (!first) {
                    outStream.write(terminator);
                  } else {
                    first = false;
                  }

                  writeGrantInfo(outStream, principalDesc.getType(),
                      principalName, dbName, tableName, partName, columnName,
                      col.getGrantInfo());
                }
              }
            }
          } else if (hiveObjectDesc.getPartSpec() != null) {
            // show partition level privileges
            List<HiveObjectPrivilege> parts = db.showPrivilegeGrant(
                HiveObjectType.PARTITION, principalName, principalDesc
                    .getType(), dbName, tableName, partValues, null);
            if (parts != null && parts.size() > 0) {
              boolean first = true;
              sortPrivileges(parts);
              for (HiveObjectPrivilege part : parts) {
                if (!first) {
                  outStream.write(terminator);
                } else {
                  first = false;
                }

                writeGrantInfo(outStream, principalDesc.getType(),
                    principalName, dbName, tableName, partName, null, part.getGrantInfo());

              }
            }
          } else {
            // show table level privileges
            List<HiveObjectPrivilege> tbls = db.showPrivilegeGrant(
                HiveObjectType.TABLE, principalName, principalDesc.getType(),
                dbName, tableName, null, null);
            if (tbls != null && tbls.size() > 0) {
              boolean first = true;
              sortPrivileges(tbls);
              for (HiveObjectPrivilege tbl : tbls) {
                if (!first) {
                  outStream.write(terminator);
                } else {
                  first = false;
                }

                writeGrantInfo(outStream, principalDesc.getType(),
                    principalName, dbName, tableName, null, null, tbl.getGrantInfo());

              }
            }
          }
        }
      }
      ((FSDataOutputStream) outStream).close();
      outStream = null;
    } catch (FileNotFoundException e) {
      LOG.info("show table status: " + stringifyException(e));
      return 1;
    } catch (IOException e) {
      LOG.info("show table status: " + stringifyException(e));
      return 1;
    } catch (Exception e) {
      LOG.error("show table status: " + stringifyException(e));
      throw new HiveException(e);
    } finally {
      IOUtils.closeStream((FSDataOutputStream) outStream);
    }
    return 0;
  }
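Every branch above uses the same output idiom: a first flag ensures the terminator byte is written between rows, never after the last one. The idiom in isolation; the SeparatorIdiom class and writeRows method are illustrative, not Hive APIs:

import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.List;

class SeparatorIdiom {
  // Write rows separated -- not terminated -- by a single terminator byte.
  static void writeRows(OutputStream out, List<String> rows, int terminator)
      throws IOException {
    boolean first = true;
    for (String row : rows) {
      if (!first) {
        out.write(terminator);
      } else {
        first = false;
      }
      out.write(row.getBytes(StandardCharsets.UTF_8));
    }
  }
}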


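// Excerpt (truncated), apparently from GRANT/REVOKE handling: validate the
// privilege subject, then collect HiveObjectPrivilege entries in a PrivilegeBag.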
    try {

      if (privSubjectDesc != null) {
        if (privSubjectDesc.getPartSpec() != null && isGrant) {
          throw new HiveException("Grant does not support partition level.");
        }
        String obj = privSubjectDesc.getObject();
        boolean notFound = true;
        if (privSubjectDesc.getTable()) {
          String[] dbTab = obj.split("\\.");
          if (dbTab.length == 2) {
            dbName = dbTab[0];
            tableName = dbTab[1];
          } else {
            dbName = SessionState.get().getCurrentDatabase();
            tableName = obj;
          }
          dbObj = db.getDatabase(dbName);
          tableObj = db.getTable(dbName, tableName);
          notFound = (dbObj == null || tableObj == null);
        } else {
          dbName = privSubjectDesc.getObject();
          dbObj = db.getDatabase(dbName);
          notFound = (dbObj == null);
        }
        if (notFound) {
          throw new HiveException(obj + " cannot be found");
        }
      }

      PrivilegeBag privBag = new PrivilegeBag();
      if (privSubjectDesc == null) {
        for (int idx = 0; idx < privileges.size(); idx++) {
          Privilege priv = privileges.get(idx).getPrivilege();
          if (privileges.get(idx).getColumns() != null
              && privileges.get(idx).getColumns().size() > 0) {
            throw new HiveException(
                "For user-level privileges, column sets should be null. columns="
                    + privileges.get(idx).getColumns().toString());
          }

          privBag.addToPrivileges(new HiveObjectPrivilege(new HiveObjectRef(
              HiveObjectType.GLOBAL, null, null, null, null), null, null,
              new PrivilegeGrantInfo(priv.toString(), 0, grantor, grantorType,
                  grantOption)));
        }
      } else {
        org.apache.hadoop.hive.metastore.api.Partition partObj = null;
        List<String> partValues = null;
        if (tableObj != null) {
          if ((!tableObj.isPartitioned())
              && privSubjectDesc.getPartSpec() != null) {
            throw new HiveException(
                "Table is not partitioned, but partition name is present: partSpec="
                    + privSubjectDesc.getPartSpec().toString());
          }

          if (privSubjectDesc.getPartSpec() != null) {
            partObj = db.getPartition(tableObj, privSubjectDesc.getPartSpec(),
                false).getTPartition();
            partValues = partObj.getValues();
          }
        }

        for (PrivilegeDesc privDesc : privileges) {
          List<String> columns = privDesc.getColumns();
          Privilege priv = privDesc.getPrivilege();
          if (columns != null && columns.size() > 0) {
            if (!priv.supportColumnLevel()) {
              throw new HiveException(priv.toString()
                  + " does not support column level.");
            }
            if (privSubjectDesc == null || tableName == null) {
              throw new HiveException(
                  "For user-level/database-level privileges, column sets should be null. columns="
                      + columns);
            }
            for (int i = 0; i < columns.size(); i++) {
              privBag.addToPrivileges(new HiveObjectPrivilege(

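// Excerpt (truncated): role operations; an unrecognized operation is rejected
// with a HiveException, and errors are reported on the console.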
          }
          ((FSDataOutputStream) outStream).close();
          outStream = null;
        }
      } else {
        throw new HiveException("Unkown role operation "
            + operation.getOperationName());
      }
    } catch (HiveException e) {
      console.printError("Error in role operation "
          + operation.getOperationName() + " on role name "

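// Excerpt (truncated), apparently from ALTER DATABASE property handling: a
// missing database is reported through the ErrorMsg-based constructor.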
      } else { // if one of them is null, replace the old params with the new one
        database.setParameters(newParams);
      }
      db.alterDatabase(database.getName(), database);
    } else {
      throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, dbName);
    }
    return 0;
  }
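Note the constructor used above: instead of an ad-hoc string it takes an ErrorMsg constant plus message arguments, so the exception carries Hive's canonical error code and text. The same guard on its own; the ErrorMsgSketch class and checkDatabase method are illustrative:

import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;

class ErrorMsgSketch {
  // Illustrative guard: look up a database or fail with Hive's canonical
  // error code for a missing database.
  static Database checkDatabase(Hive db, String dbName) throws HiveException {
    Database database = db.getDatabase(dbName);
    if (database == null) {
      throw new HiveException(ErrorMsg.DATABASE_NOT_EXISTS, dbName);
    }
    return database;
  }
}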

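// Excerpt (truncated): updating index timestamp properties; note that both
// catch blocks rethrow as HiveException.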
              props.put("base_timestamp", basePartTs.get(null).toString());
            }
          }
          idx.getParameters().putAll(props);
        } catch (HiveException e) {
          throw new HiveException("ERROR: Failed to update index timestamps", e);
        } catch (IOException e) {
          throw new HiveException("ERROR: Failed to look up timestamps on filesystem", e);
        }

        break;
      default:
        console.printError("Unsupported Alter commnad");

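// Excerpt (truncated), apparently from ADD PARTITION handling: a LOCATION
// clause is rejected for view partitions; otherwise the partition path is
// resolved relative to the table.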
                    addPartitionDesc.getBucketCols(),
                    addPartitionDesc.getSortCols());

    } else {
      if (tbl.isView()) {
        throw new HiveException("LOCATION clause illegal for view partition");
      }
      // set partition path relative to table
      db.createPartition(tbl, addPartitionDesc.getPartSpec(), new Path(tbl
                    .getPath(), addPartitionDesc.getLocation()), addPartitionDesc.getPartParams(),
                    addPartitionDesc.getInputFormat(),

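// Excerpt (truncated): replacing a table's partition keys, translating the
// metastore's InvalidOperationException into a HiveException with ErrorMsg.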
    tbl.getTTable().setPartitionKeys(newPartitionKeys);

    try {
      db.alterTable(tabName, tbl);
    } catch (InvalidOperationException e) {
      throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "Unable to alter " + tabName);
    }

    work.getInputs().add(new ReadEntity(tbl));
    work.getOutputs().add(new WriteEntity(tbl));

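// Excerpt (truncated), apparently from ALTER TABLE ... TOUCH: update the table
// or a single partition and record what was read and written.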
    if (touchDesc.getPartSpec() == null) {
      try {
        db.alterTable(tblName, tbl);
      } catch (InvalidOperationException e) {
        throw new HiveException("Uable to update table");
      }
      work.getInputs().add(new ReadEntity(tbl));
      work.getOutputs().add(new WriteEntity(tbl));
    } else {
      Partition part = db.getPartition(tbl, touchDesc.getPartSpec(), false);
      if (part == null) {
        throw new HiveException("Specified partition does not exist");
      }
      try {
        db.alterPartition(tblName, part);
      } catch (InvalidOperationException e) {
        throw new HiveException(e);
      }
      work.getInputs().add(new ReadEntity(part));
      work.getOutputs().add(new WriteEntity(part));
    }
    return 0;
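In both branches the touched object is also registered as a ReadEntity and a WriteEntity on the work object; Hive uses these input/output sets to drive authorization checks, locking, and post-execution hooks, so skipping them would make the operation invisible to those layers.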

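// Excerpt: a complete helper that converts an IOException from the filesystem
// into a HiveException, so callers deal with a single exception type.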
  private boolean pathExists(Path p) throws HiveException {
    try {
      FileSystem fs = p.getFileSystem(conf);
      return fs.exists(p);
    } catch (IOException e) {
      throw new HiveException(e);
    }
  }
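A hypothetical call site for the helper above (sourcePath is illustrative, not from the original code):

  // Illustrative only: fail early when the source path is missing.
  if (!pathExists(sourcePath)) {
    throw new HiveException("Source path " + sourcePath + " cannot be found");
  }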

  }

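// Excerpt: a complete helper that renames a directory and fails loudly when
// the filesystem reports an unsuccessful rename.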
  private void moveDir(FileSystem fs, Path from, Path to) throws HiveException {
    try {
      if (!fs.rename(from, to)) {
        throw new HiveException("Moving " + from + " to " + to + " failed!");
      }
    } catch (IOException e) {
      throw new HiveException(e);
    }
  }
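FileSystem.rename reports many failure modes, such as a destination that already exists, by returning false rather than throwing, so the explicit boolean check above is what turns a silent failure into a HiveException; the catch clause covers genuine I/O errors.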
