Package org.apache.hadoop.security

Examples of org.apache.hadoop.security.AccessControlException

The excerpts below, drawn from Hadoop HDFS, HBase, MapReduce, and YARN sources, show the common situations in which AccessControlException is thrown or handled: rejected impersonation, unauthorized RPC connections, file-permission checks, ownership changes, and queue ACL enforcement.
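Before the excerpts, a minimal self-contained sketch of the pattern they all share: a guard method that throws AccessControlException on a failed check, and a caller that handles it. The allow-list and the "hdfs" entry here are hypothetical.

  import java.io.IOException;
  import java.util.Collections;
  import java.util.Set;

  import org.apache.hadoop.security.AccessControlException;
  import org.apache.hadoop.security.UserGroupInformation;

  public class AclCheckSketch {

    // Hypothetical guard: deny callers that are not on the allow-list.
    static void checkAllowed(UserGroupInformation ugi, Set<String> allowed)
        throws AccessControlException {
      if (!allowed.contains(ugi.getShortUserName())) {
        throw new AccessControlException(
            "User " + ugi.getUserName() + " is not authorized");
      }
    }

    public static void main(String[] args) throws IOException {
      UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
      try {
        checkAllowed(ugi, Collections.singleton("hdfs"));
      } catch (AccessControlException ace) {
        // Real callers log the denial and rethrow or report it.
        System.err.println("Denied: " + ace.getMessage());
      }
    }
  }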


From an RPC server's connection setup: impersonation is rejected when the client authenticated with a DIGEST token:

        // This is not allowed if the user authenticated with DIGEST.
        if ((protocolUser != null)
            && (!protocolUser.getUserName().equals(user.getUserName()))) {
          if (authMethod == AuthMethod.DIGEST) {
            // Not allowed to doAs if token (DIGEST) authentication is used.
            throw new AccessControlException("Authenticated user (" + user
                + ") doesn't match what the client claims to be ("
                + protocolUser + ")");
          } else {
            // The effective user can differ from the authenticated user
            // under simple or Kerberos authentication (the proxy-user case).
            // ... (excerpt truncated here)
          }
        }
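The effective-versus-authenticated split that this check guards is Hadoop's proxy-user (doAs) mechanism. A hedged sketch of constructing such an identity; the user name "alice" is hypothetical, and the server must be configured to permit the impersonation:

  import java.security.PrivilegedExceptionAction;

  import org.apache.hadoop.security.UserGroupInformation;

  public class ProxyUserSketch {
    public static void main(String[] args) throws Exception {
      // The authenticated (real) user, e.g. a Kerberos-authenticated service.
      UserGroupInformation realUser = UserGroupInformation.getCurrentUser();

      // An effective user acting through the real user's credentials.
      UserGroupInformation proxy =
          UserGroupInformation.createProxyUser("alice", realUser);

      proxy.doAs((PrivilegedExceptionAction<Void>) () -> {
        // Calls made here carry "alice" as the effective user; a server
        // may still reject the impersonation with AccessControlException,
        // as the excerpt above does for DIGEST (token) authentication.
        System.out.println("Effective user: "
            + UserGroupInformation.getCurrentUser().getUserName());
        return null;
      });
    }
  }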


From HBase's RPC server: once the connection header is read, an unauthorized connection is rejected:

        processConnectionHeader(buf);
        this.connectionHeaderRead = true;
        if (!authorizeConnection()) {
          // Throw a FatalConnectionException wrapping the ACE so the client
          // does the right thing and closes the connection instead of trying
          // to read a non-existent return value.
          throw new AccessControlException("Connection from " + this + " for service " +
            connectionHeader.getServiceName() + " is unauthorized for user: " + user);
        }
      }
    }

From HBase hbck's permission pre-check: the denial is reported to the operator and then rethrown:

      } catch (AccessControlException ace) {
        LOG.warn("Got AccessControlException in preCheckPermission", ace);
        errors.reportError(ERROR_CODE.WRONG_USAGE, "Current user " + ugi.getUserName()
          + " does not have write perms to " + file.getPath()
          + ". Please rerun hbck as hdfs user " + file.getOwner());
        throw new AccessControlException(ace);
      }
    }
  }

A variant of the same hbck check that reports the caller's short user name instead:

      } catch (AccessControlException ace) {
        LOG.warn("Got AccessControlException in preCheckPermission", ace);
        errors.reportError(ERROR_CODE.WRONG_USAGE, "Current user " + user.getShortName()
          + " does not have write perms to " + file.getPath()
          + ". Please rerun hbck as hdfs user " + file.getOwner());
        throw new AccessControlException(ace);
      }
    }
  }

The tail of a user/group/other permission walk: if neither the owner, group, nor other bits grant the requested action, access is denied:

        return;
      }
    } else if (file.getPermission().getOtherAction().implies(action)) {
      return;
    }
    throw new AccessControlException("Permission denied:" + " action=" + action
        + " path=" + file.getPath() + " user=" + username);
  }
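The check above hinges on FsAction#implies; a small standalone sketch of how the user, group, and other bits answer it (the 0640 mode is just an example):

  import org.apache.hadoop.fs.permission.FsAction;
  import org.apache.hadoop.fs.permission.FsPermission;

  public class ImpliesSketch {
    public static void main(String[] args) {
      FsPermission perms = new FsPermission((short) 0640); // rw-r-----
      System.out.println(perms.getUserAction().implies(FsAction.READ));   // true: owner has rw-
      System.out.println(perms.getGroupAction().implies(FsAction.WRITE)); // false: group has r--
      System.out.println(perms.getOtherAction().implies(FsAction.READ));  // false: other has ---
    }
  }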

From the MapReduce JobTracker: queue ACLs are checked before an operation is performed on a job's queue:

  private void checkAccess(JobInProgress job, QueueManager.QueueOperation oper,
                           UserGroupInformation ugi) throws IOException {
    // Look up the queue the job belongs to.
    String queue = job.getProfile().getQueueName();
    if (!queueManager.hasAccess(queue, job, oper, ugi)) {
      throw new AccessControlException("User "
                            + ugi.getUserName()
                            + " cannot perform "
                            + "operation " + oper + " on queue " + queue +
                            ".\n Please run \"hadoop queue -showacls\" " +
                            "command to find the queues you have access" +
                            // ... (excerpt truncated mid-message)

From HDFS's FSNamesystem (the setOwner path): a non-superuser may not transfer ownership and must belong to the new group:

      }
      src = FSDirectory.resolvePath(src, pathComponents, dir);
      checkOwner(pc, src);
      if (!pc.isSuperUser()) {
        if (username != null && !pc.getUser().equals(username)) {
          throw new AccessControlException("Non-super user cannot change owner");
        }
        if (group != null && !pc.containsGroup(group)) {
          throw new AccessControlException("User does not belong to " + group);
        }
      }
      dir.setOwner(src, username, group);
      resultingStat = getAuditFileInfo(src, false);
    } finally {
      // ... (excerpt truncated)
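Client-side, these rules surface through FileSystem#setOwner; a sketch assuming a configured HDFS client (the path, user, and group names are hypothetical):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.security.AccessControlException;

  public class SetOwnerSketch {
    public static void main(String[] args) throws Exception {
      FileSystem fs = FileSystem.get(new Configuration());
      try {
        // Rejected unless the caller is the superuser (for the owner change)
        // and a member of the target group (for the group change).
        fs.setOwner(new Path("/data/report.csv"), "alice", "analysts");
      } catch (AccessControlException ace) {
        System.err.println("setOwner rejected: " + ace.getMessage());
      }
    }
  }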

Also from FSNamesystem: an IOException raised while resolving the remote user is rewrapped as an AccessControlException:

  private FSPermissionChecker getPermissionChecker()
      throws AccessControlException {
    try {
      return new FSPermissionChecker(fsOwnerShortUserName, supergroup, getRemoteUser());
    } catch (IOException ioe) {
      // AccessControlException(Throwable) preserves the original cause.
      throw new AccessControlException(ioe);
    }
  }
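One reason this rewrap is convenient: org.apache.hadoop.security.AccessControlException itself extends IOException (via the deprecated org.apache.hadoop.fs.permission variant), so it flows through IOException-throwing APIs and broad catch blocks:

  import java.io.IOException;

  import org.apache.hadoop.security.AccessControlException;

  public class AceIsIoException {
    public static void main(String[] args) {
      try {
        throw new AccessControlException("Permission denied (sketch)");
      } catch (IOException ioe) {
        // A broad catch (IOException) also handles authorization failures.
        System.out.println("Caught as IOException: " + ioe.getMessage());
      }
    }
  }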

From the YARN CapacityScheduler's LeafQueue: application submission is gated on the queue's ACLs, its state, and per-queue and per-user application limits:

    // Careful! Locking order is important!

    // Check queue ACLs
    UserGroupInformation userUgi = UserGroupInformation.createRemoteUser(userName);
    if (!hasAccess(QueueACL.SUBMIT_APPLICATIONS, userUgi)) {
      throw new AccessControlException("User " + userName + " cannot submit" +
          " applications to queue " + getQueuePath());
    }

    User user = null;
    synchronized (this) {

      // Check if the queue is accepting jobs
      if (getState() != QueueState.RUNNING) {
        String msg = "Queue " + getQueuePath() +
        " is STOPPED. Cannot accept submission of application: " +
        application.getApplicationId();
        LOG.info(msg);
        throw new AccessControlException(msg);
      }

      // Check submission limits for queues
      if (getNumApplications() >= getMaxApplications()) {
        String msg = "Queue " + getQueuePath() +
        " already has " + getNumApplications() + " applications," +
        " cannot accept submission of application: " +
        application.getApplicationId();
        LOG.info(msg);
        throw new AccessControlException(msg);
      }

      // Check submission limits for the user on this queue
      user = getUser(userName);
      if (user.getTotalApplications() >= getMaxApplicationsPerUser()) {
        String msg = "Queue " + getQueuePath() +
        " already has " + user.getTotalApplications() +
        " applications from user " + userName +
        " cannot accept submission of application: " +
        application.getApplicationId();
        LOG.info(msg);
        throw new AccessControlException(msg);
      }

      // Add the application to our data-structures
      addApplication(application, user);
    }

A manual permission walk over the user, group, and other bits, translating the deprecated org.apache.hadoop.fs.permission exception thrown by older releases:

    } catch (FileNotFoundException fnfe) {
      // The file named by path doesn't exist; nothing to validate.
      return;
    } catch (org.apache.hadoop.fs.permission.AccessControlException ace) {
      // Older Hadoop releases throw this deprecated exception; translate it.
      throw new AccessControlException(ace.getMessage());
    }

    final FsPermission dirPerms = stat.getPermission();
    final String grp = stat.getGroup();

    for (FsAction action : actions) {
      if (user.equals(stat.getOwner())) {
        if (dirPerms.getUserAction().implies(action)) {
          continue;
        }
      }
      if (ArrayUtils.contains(groups, grp)) {
        if (dirPerms.getGroupAction().implies(action)) {
          continue;
        }
      }
      if (dirPerms.getOtherAction().implies(action)) {
        continue;
      }
      throw new AccessControlException("action " + action + " not permitted on path "
        + path + " for user " + user);
    }
  }
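On Hadoop 2.6 and later, a client can delegate this whole walk to the filesystem with FileSystem#access, which throws AccessControlException when the caller lacks the requested permission. A sketch; the path is hypothetical:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.fs.permission.FsAction;
  import org.apache.hadoop.security.AccessControlException;

  public class AccessCheckSketch {
    public static void main(String[] args) throws Exception {
      FileSystem fs = FileSystem.get(new Configuration());
      Path path = new Path("/data/in");
      try {
        fs.access(path, FsAction.WRITE); // server-side permission check
        System.out.println("Write access granted on " + path);
      } catch (AccessControlException ace) {
        System.err.println("Write access denied: " + ace.getMessage());
      }
    }
  }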
