Package org.apache.hadoop.hive.ql.metadata

Examples of org.apache.hadoop.hive.ql.metadata.AuthorizationException


    hiveObject = hiveObject + "}";

    if (inputCheck != null) {
      int input = this.firstFalseIndex(inputCheck);
      if (input >= 0) {
        throw new AuthorizationException("No privilege '"
            + inputRequiredPriv[input].getPriv() + "' found for inputs "
            + hiveObject);
      }
    }

    if (outputCheck != null) {
      int output = this.firstFalseIndex(outputCheck);
      if (output >= 0) {
        throw new AuthorizationException("No privilege '"
            + outputRequiredPriv[output].getPriv() + "' found for outputs "
            + hiveObject);
      }
    }
  }
View Full Code Here
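The check above depends on a firstFalseIndex helper that is not part of the excerpt. A minimal sketch of what such a helper might look like, assuming inputCheck and outputCheck are boolean arrays with one entry per required privilege (this reconstruction is an assumption, not the actual Hive code):

  // Hypothetical reconstruction: return the index of the first privilege
  // check that failed, or -1 when every entry is true.
  private int firstFalseIndex(boolean[] checkResults) {
    if (checkResults != null) {
      for (int i = 0; i < checkResults.length; i++) {
        if (!checkResults[i]) {
          return i;
        }
      }
    }
    return -1;
  }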


  protected FsAction getFsAction(Privilege priv, Path path) {

    switch (priv.getPriv()) {
    case ALL:
      throw new AuthorizationException("no matching Action for Privilege.All");
    case ALTER_DATA:
    case ALTER_METADATA:
    case CREATE:
    case DROP:
    case INDEX:
    case LOCK:
      return FsAction.WRITE;
    case SELECT:
    case SHOW_DATABASE:
      return FsAction.READ;
    case UNKNOWN:
    default:
      throw new AuthorizationException("Unknown privilege");
    }
  }
View Full Code Here
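getFsAction only maps a privilege onto a filesystem action; a separate step still has to compare that action with the permission bits on the path. A hedged sketch of such a comparison, assuming a simplified model that looks only at the owner bits (the class and method names here are illustrative, not the actual Hive implementation):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.hive.ql.metadata.AuthorizationException;

public class PathAccessSketch {
  // Hypothetical check: fail when the owner permission bits on the path do
  // not imply the FsAction derived from the requested privilege. The real
  // Hive provider also considers the requesting user and the group/other bits.
  static void checkPathAccess(Configuration conf, Path path, FsAction required)
      throws IOException {
    FileSystem fs = path.getFileSystem(conf);
    FileStatus status = fs.getFileStatus(path);
    FsAction granted = status.getPermission().getUserAction();
    if (!granted.implies(required)) {
      throw new AuthorizationException("Action " + required + " not permitted for path " + path);
    }
  }
}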

      }

      checkPermissions(getConf(), path, actions);

    } catch (AccessControlException ex) {
      throw new AuthorizationException(ex);
    } catch (LoginException ex) {
      throw new AuthorizationException(ex);
    } catch (IOException ex) {
      throw new HiveException(ex);
    }
  }
View Full Code Here
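The excerpt above turns security-related failures (AccessControlException, LoginException) into AuthorizationException while leaving plain I/O failures wrapped in HiveException, so callers can tell a denied request apart from a check that simply could not run. A small hedged sketch of that distinction on the calling side (all names here are illustrative):

import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
import org.apache.hadoop.hive.ql.metadata.HiveException;

public class AuthorizationCallerSketch {

  // Minimal stand-in for whatever performs the privilege check.
  interface AuthorizationCheck {
    void run() throws HiveException;
  }

  // Illustrative caller: a denied request is reported back to the user,
  // while a check that could not complete is surfaced as an internal error.
  static void runWithAuthorization(AuthorizationCheck check) {
    try {
      check.run();
    } catch (AuthorizationException ae) {
      System.err.println("Access denied: " + ae.getMessage());
    } catch (HiveException he) {
      throw new RuntimeException("Authorization check could not complete", he);
    }
  }
}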

        }
        if (requiredInputPrivileges.containsKey(getAuthzType(inputHierarchy))) {
          EnumSet<DBModelAction> inputPrivSet =
            requiredInputPrivileges.get(getAuthzType(inputHierarchy));
          if (!authProvider.hasAccess(subject, inputHierarchy, inputPrivSet)) {
            throw new AuthorizationException("User " + subject.getName() +
                " does not have privileges for " + hiveOp.name());
          }
        }
      }
      // Check write entities
      Map<AuthorizableType, EnumSet<DBModelAction>> requiredOutputPrivileges =
          stmtAuthPrivileges.getOutputPrivileges();
      for (List<DBModelAuthorizable> outputHierarchy : outputHierarchyList) {
        if(isDebug) {
          LOG.debug("requiredOutputPrivileges = " + requiredOutputPrivileges);
          LOG.debug("outputHierarchy = " + outputHierarchy);
          LOG.debug("getAuthzType(outputHierarchy) = " + getAuthzType(outputHierarchy));
        }
        if (requiredOutputPrivileges.containsKey(getAuthzType(outputHierarchy))) {
          EnumSet<DBModelAction> outputPrivSet =
            requiredOutputPrivileges.get(getAuthzType(outputHierarchy));
          if (!authProvider.hasAccess(subject, outputHierarchy, outputPrivSet)) {
            throw new AuthorizationException("User " + subject.getName() +
                " does not have priviliedges for " + hiveOp.name());
          }
        }
      }
  }
View Full Code Here

      outputHierarchy.add(connectHierarchy);
      break;

    default:
      throw new AuthorizationException("Unknown operation scope type " +
          stmtAuthObject.getOperationScope().toString());
    }

    // validate permission
    hiveAuthzBinding.authorize(stmtOperation, stmtAuthObject, getCurrentSubject(context),
View Full Code Here

    for (String hiveUDF : Splitter.on(",").omitEmptyStrings().trimResults().split(whiteList)) {
      if (queryUDF.equalsIgnoreCase(hiveUDF)) {
        return; // found the given UDF in whitelist
      }
    }
    throw new AuthorizationException("The UDF " + queryUDF + " is not found in the list of allowed UDFs");
  }
View Full Code Here
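The Guava Splitter chain above is what makes the whitelist tolerant of whitespace and stray commas in the configured value; a small self-contained illustration of that behavior:

import com.google.common.base.Splitter;

public class SplitterDemo {
  public static void main(String[] args) {
    // " concat, upper ,,lower " is parsed as ["concat", "upper", "lower"]:
    // trimResults() strips the surrounding whitespace and omitEmptyStrings()
    // drops the empty entry produced by the double comma.
    String whiteList = " concat, upper ,,lower ";
    for (String udf : Splitter.on(",").omitEmptyStrings().trimResults().split(whiteList)) {
      System.out.println(udf);
    }
  }
}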

    case DFS_DIR:
    case LOCAL_DIR:
      try {
        objectHierarchy.add(parseURI(entity.toString()));
      } catch (Exception e) {
        throw new AuthorizationException("Failed to get File URI", e);
      }
      break;
    default:
      throw new UnsupportedOperationException("Unsupported entity type " +
          entity.getType().name());
View Full Code Here

        if (PathUtils.impliesURI(localScratchURI, localRequestURI)) {
          return true;
        }
      }
    } catch (Exception e) {
      throw new AuthorizationException("Failed to extract uri details", e);
    }
    return false;
  }
View Full Code Here
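PathUtils.impliesURI is a Sentry helper; as a rough illustration of the idea only (not the Sentry implementation), a granted scratch-directory URI "implies" a requested URI when the schemes match and the granted path is an ancestor of the requested path:

import java.net.URI;

public class UriImpliesSketch {
  // Rough illustration only: the granted (scratch dir) URI implies the
  // requested URI when the schemes match and the granted path is a path
  // ancestor. Sentry's PathUtils handles authorities, default schemes,
  // and other edge cases that this sketch ignores.
  static boolean impliesUri(URI granted, URI requested) {
    if (granted.getScheme() == null || !granted.getScheme().equals(requested.getScheme())) {
      return false;
    }
    String grantedPath = granted.getPath().endsWith("/")
        ? granted.getPath() : granted.getPath() + "/";
    return (requested.getPath() + "/").startsWith(grantedPath);
  }
}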

    for(AuthorizableType key: requiredInputPrivileges.keySet()) {
      for (List<DBModelAuthorizable> inputHierarchy : inputHierarchyList) {
        if (getAuthzType(inputHierarchy).equals(key)) {
          found = true;
          if (!authProvider.hasAccess(subject, inputHierarchy, requiredInputPrivileges.get(key), activeRoleSet)) {
            throw new AuthorizationException("User " + subject.getName() +
                " does not have privileges for " + hiveOp.name());
          }
        }
      }
      if(!found && !(key.equals(AuthorizableType.URI)) &&  !(hiveOp.equals(HiveOperation.QUERY))) {
        //URI privileges are optional for some privileges: anyPrivilege, tableDDLAndOptionalUriPrivilege
        //Query can mean select/insert/analyze where all of them have different required privileges.
        //For these alone we skip if there is no equivalent input privilege
        //TODO: Even this case should be handled to make sure we do not skip the privilege check if we did not build
        //the input privileges correctly
        throw new AuthorizationException("Required privilege( " + key.name() + ") not available in input privileges");
      }
      found = false;
    }

    for(AuthorizableType key: requiredOutputPrivileges.keySet()) {
      for (List<DBModelAuthorizable> outputHierarchy : outputHierarchyList) {
        if (getAuthzType(outputHierarchy).equals(key)) {
          found = true;
          if (!authProvider.hasAccess(subject, outputHierarchy, requiredOutputPrivileges.get(key), activeRoleSet)) {
            throw new AuthorizationException("User " + subject.getName() +
                " does not have privileges for " + hiveOp.name());
          }
        }
      }
      if(!found && !(key.equals(AuthorizableType.URI)) &&  !(hiveOp.equals(HiveOperation.QUERY))) {
        //URI privileges are optional for some privileges: tableInsertPrivilege
        //Query can mean select/insert/analyze where all of them have different required privileges.
        //For these alone we skip if there is no equivalent output privilege
        //TODO: Even this case should be handled to make sure we do not skip the privilege check if we did not build
        //the output privileges correctly
        throw new AuthorizationException("Required privilege( " + key.name() + ") not available in output privileges");
      }
      found = false;
    }

  }
View Full Code Here

