Package org.apache.hadoop.security

Examples of org.apache.hadoop.security.Credentials
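
The excerpts below show how real projects (Hadoop MapReduce, YARN, Apache Twill, HCatalog) create, serialize, merge, and filter Credentials, Hadoop's container for security tokens and secret keys. As a baseline, here is a minimal self-contained sketch of the core read/write API; the class name, alias, and file path are made up for illustration:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;

public class CredentialsRoundTrip {
  public static void main(String[] args) throws IOException {
    // A Credentials object holds security tokens and named secret keys.
    Credentials credentials = new Credentials();
    credentials.addSecretKey(new Text("my-secret"), "s3cr3t".getBytes());

    // Persist to Hadoop's token-storage file format and read it back.
    Configuration conf = new Configuration();
    Path file = new Path("/tmp/example.credentials");  // hypothetical path
    credentials.writeTokenStorageFile(file, conf);

    Credentials restored = Credentials.readTokenStorageFile(file, conf);
    System.out.println("secret keys restored: " + restored.numberOfSecretKeys());
  }
}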


    // From a test-harness job subclass: the parent JobImpl constructor is
    // handed a fresh, empty Credentials along with a new JobTokenSecretManager.
        OutputCommitter committer, boolean newApiCommitter,
        String user, AppContext appContext,
        JobStateInternal forcedState, String diagnostic) {
      super(jobId, getApplicationAttemptId(applicationId, getStartCount()),
          conf, eventHandler, taskAttemptListener,
          new JobTokenSecretManager(), new Credentials(), clock,
          getCompletedTaskFromPreviousRun(), metrics, committer,
          newApiCommitter, user, System.currentTimeMillis(), getAllAMInfos(),
          appContext, forcedState, diagnostic);

      // This "this leak" is okay because the retained pointer is in an


  // From the YARN ResourceManager: rebuild the Credentials that the client
  // serialized into the ApplicationSubmissionContext's AM container spec.
 
  private Credentials parseCredentials(ApplicationSubmissionContext application)
      throws IOException {
    Credentials credentials = new Credentials();
    DataInputByteBuffer dibb = new DataInputByteBuffer();
    ByteBuffer tokens = application.getAMContainerSpec().getTokens();
    if (tokens != null) {
      dibb.reset(tokens);
      credentials.readTokenStorageStream(dibb);
      tokens.rewind();
    }
    return credentials;
  }
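
For reference, the writing side of this round trip is the mirror image: the submitter serializes its Credentials into a ByteBuffer before setting it on the AM container spec. A minimal sketch using only stock Hadoop classes; the helper name packCredentials is hypothetical:

import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.security.Credentials;

// Hypothetical helper: serialize Credentials into the ByteBuffer that
// parseCredentials() above reads back via readTokenStorageStream().
static ByteBuffer packCredentials(Credentials credentials) throws IOException {
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  return ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
}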

      // Guard: only Hadoop Credentials stores can be applied here.
      if (!(store instanceof Credentials)) {
        LOG.warn("Only Hadoop Credentials is supported. Ignoring update for {}.", cell);
        continue;
      }

      Credentials credentials = (Credentials) store;
      if (credentials.getAllTokens().isEmpty()) {
        // Nothing to update.
        continue;
      }

      try {

  // Merge token updates into the per-run credentials file stored for the application.
  private void updateCredentials(String application, RunId runId, Credentials updates) throws IOException {
    Location credentialsLocation = locationFactory.create(String.format("/%s/%s/%s", application, runId.getId(),
                                                                        Constants.Files.CREDENTIALS));
    // Try to read the old credentials.
    Credentials credentials = new Credentials();
    if (credentialsLocation.exists()) {
      DataInputStream is = new DataInputStream(new BufferedInputStream(credentialsLocation.getInputStream()));
      try {
        credentials.readTokenStorageStream(is);
      } finally {
        is.close();
      }
    }

    // Merge in the updates; Credentials.addAll overwrites entries that share a key.
    credentials.addAll(updates);

    // Write the merged credentials to a temp file first, then rename it into place.
    Location tmpLocation = credentialsLocation.getTempFile(Constants.Files.CREDENTIALS);

    // Save the credentials store with user-only permission.
    DataOutputStream os = new DataOutputStream(new BufferedOutputStream(tmpLocation.getOutputStream("600")));
    try {
      credentials.writeTokenStorageToStream(os);
    } finally {
      os.close();
    }

    // Rename the tmp file into the credentials location
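
When the credentials file lives on a Hadoop FileSystem rather than a Twill Location, the stock Credentials API covers the same read-merge-write cycle in a few calls. A minimal sketch under that assumption; mergeAndSave is a hypothetical helper name:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.Credentials;

// Hypothetical helper: read an existing credentials file, merge updates into
// it (addAll overwrites duplicate keys), and write the result back.
static void mergeAndSave(Path file, Credentials updates, Configuration conf) throws IOException {
  Credentials credentials = Credentials.readTokenStorageFile(file, conf);
  credentials.addAll(updates);
  credentials.writeTokenStorageFile(file, conf);
}

Unlike the method above, this sketch assumes the file already exists and skips the temp-file-plus-rename step, so the update is not atomic.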


  // Build the Credentials handed to launched containers: start from the current
  // user's credentials, then strip the AM->RM token, which must never be passed
  // to containers.
  private Credentials createCredentials() {
    Credentials credentials = new Credentials();
    if (!UserGroupInformation.isSecurityEnabled()) {
      return credentials;
    }

    try {
      credentials.addAll(UserGroupInformation.getCurrentUser().getCredentials());

      // Remove the AM->RM tokens
      Iterator<Token<?>> iter = credentials.getAllTokens().iterator();
      while (iter.hasNext()) {
        Token<?> token = iter.next();
        if (token.getKind().equals(AMRM_TOKEN_KIND_NAME)) {
          iter.remove();
        }
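
Downstream, credentials filtered this way are typically handed to containers via the launch context. A sketch of that hand-off, reusing the hypothetical packCredentials helper from earlier:

import java.io.IOException;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;

// Attach serialized credentials to a container being launched.
static void attachCredentials(ContainerLaunchContext ctx, Credentials credentials) throws IOException {
  ctx.setTokens(packCredentials(credentials));
}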


  // A variant that also acquires filesystem delegation tokens so the launched
  // application can access the cluster filesystem.
  private Credentials createCredentials() {
    Credentials credentials = new Credentials();

    try {
      credentials.addAll(UserGroupInformation.getCurrentUser().getCredentials());

      List<Token<?>> tokens = YarnUtils.addDelegationTokens(yarnConfig, locationFactory, credentials);
      for (Token<?> token : tokens) {
        LOG.debug("Delegation token acquired for {}, {}", locationFactory.getHomeLocation().toURI(), token);
      }
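
YarnUtils.addDelegationTokens is a Twill wrapper; against the plain Hadoop API, acquiring filesystem delegation tokens looks roughly like the sketch below. The renewer principal "yarn" is an assumption and must match the ResourceManager's configured renewer in practice:

import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;

static void addFsDelegationTokens(FileSystem fs, Credentials credentials) throws IOException {
  // Asks the filesystem (e.g. the HDFS NameNode) for delegation tokens and
  // adds them to 'credentials'; tokens already present are not re-fetched.
  Token<?>[] tokens = fs.addDelegationTokens("yarn", credentials);
  for (Token<?> token : tokens) {
    System.out.println("acquired " + token.getKind() + " for " + token.getService());
  }
}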

    // From a JobImpl unit test: the job is built with empty Credentials and the
    // init transition is driven directly.
    JobId jobId = TypeConverter.toYarn(jobID);
    MRAppMetrics mrAppMetrics = MRAppMetrics.create();
    JobImpl job =
        new JobImpl(jobId, ApplicationAttemptId.newInstance(
          ApplicationId.newInstance(0, 0), 0), conf, mock(EventHandler.class),
          null, new JobTokenSecretManager(), new Credentials(), null, null,
          mrAppMetrics, null, true, null, 0, null, null, null, null);
    InitTransition initTransition = getInitTransition(2);
    JobEvent mockJobEvent = mock(JobEvent.class);
    initTransition.transition(job, mockJobEvent);
    boolean isUber = job.isUber();

    // Test stub: a JobImpl subclass wired with empty Credentials and a
    // configurable number of splits.
    public StubbedJob(JobId jobId, ApplicationAttemptId applicationAttemptId,
        Configuration conf, EventHandler eventHandler, boolean newApiCommitter,
        String user, int numSplits, AppContext appContext) {
      super(jobId, applicationAttemptId, conf, eventHandler,
          null, new JobTokenSecretManager(), new Credentials(),
          new SystemClock(), Collections.<TaskId, TaskInfo> emptyMap(),
          MRAppMetrics.create(), null, newApiCommitter, user,
          System.currentTimeMillis(), null, appContext, null, null);

      initTransition = getInitTransition(numSplits);

        PigHCatUtil.getConfigFromUDFProperties(udfProps,
          job.getConfiguration(), emr.nextElement().toString());
      }
      if (!HCatUtil.checkJobContextIfRunningFromBackend(job)) {
        // Combine the cached and job credentials; the job's credentials take
        // precedence for freshness.
        Credentials crd = jobCredentials.get(INNER_SIGNATURE_PREFIX + "_" + signature);
        crd.addAll(job.getCredentials());
        job.getCredentials().addAll(crd);
      }
    } else {
      Job clone = new Job(job.getConfiguration());
      HCatInputFormat.setInput(job, dbName, tableName).setFilter(getPartitionFilterString());

      // We will store all the new/changed properties of the job in the UDF
      // context, so that the HCatInputFormat.setInput method need not be
      // called many times.
      for (Entry<String, String> keyValue : job.getConfiguration()) {
        String oldValue = clone.getConfiguration().getRaw(keyValue.getKey());
        if ((oldValue == null) || !keyValue.getValue().equals(oldValue)) {
          udfProps.put(keyValue.getKey(), keyValue.getValue());
        }
      }
      udfProps.put(HCatConstants.HCAT_PIG_LOADER_LOCATION_SET, true);

      // Store credentials in a private hash map rather than the UDF context,
      // to make sure they are not exposed publicly.
      Credentials crd = new Credentials();
      crd.addAll(job.getCredentials());
      jobCredentials.put(INNER_SIGNATURE_PREFIX + "_" + signature, crd);
    }

    // Need to also push projections by calling setOutputSchema on
    // HCatInputFormat - we have to get the RequiredFields information
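
The precedence dance above works because Credentials.addAll overwrites entries whose alias already exists. A minimal sketch of that semantics; aliases and values are made up for illustration:

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;

static void demoAddAllPrecedence() {
  Credentials stored = new Credentials();
  stored.addSecretKey(new Text("shared"), "stale".getBytes());

  Credentials fromJob = new Credentials();
  fromJob.addSecretKey(new Text("shared"), "fresh".getBytes());

  // addAll overwrites duplicate aliases, so the job's value wins.
  stored.addAll(fromJob);
  assert new String(stored.getSecretKey(new Text("shared"))).equals("fresh");
}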

    // Storer counterpart: when the location is already set, restore the cached
    // config and credentials from the UDF properties.
    if (udfProps.containsKey(HCatConstants.HCAT_PIG_STORER_LOCATION_SET)) {
      for (Enumeration<Object> emr = udfProps.keys(); emr.hasMoreElements(); ) {
        PigHCatUtil.getConfigFromUDFProperties(udfProps, config, emr.nextElement().toString());
      }
      Credentials crd = jobCredentials.get(INNER_SIGNATURE_PREFIX + "_" + sign);
      if (crd != null) {
        job.getCredentials().addAll(crd);
      }
    } else {
      Job clone = new Job(job.getConfiguration());
