Examples of readFields()


Examples of org.apache.hadoop.mapred.ShuffleHeader.readFields()

        long compressedLength = -1;
        int forReduce = -1;
        boolean found = false;
        try {
          ShuffleHeader header = new ShuffleHeader();
          header.readFields(input);
          // Special case where the map output was not found
          if (header.found == false) {
            LOG.warn("getMapOutput: Header for " + mapOutputLoc + " indicates" +
                "the map output can't be found, indicating a serious error.");
            return new MapOutputStatus(null,
View Full Code Here

Examples of org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier.readFields()

    @Override
    public boolean isManaged(Token<?> token) throws IOException {
      ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier());
      DelegationTokenIdentifier id = new DelegationTokenIdentifier();
      id.readFields(new DataInputStream(buf));
      // AbstractDelegationToken converts given renewer to a short name, but
      // AbstractDelegationTokenSecretManager does not, so we have to do it
      String loginUser = UserGroupInformation.getLoginUser().getShortUserName();
      return loginUser.equals(id.getRenewer().toString());
    }
View Full Code Here

Examples of org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitIndex.readFields()

        new LocalDirAllocator("mapred.local.dir").getLocalPathToRead(
            TaskTracker.getLocalSplitFile(conf.getUser(), taskId.getJobID()
                .toString(), taskId.toString()), conf);
    DataInputStream splitFile = FileSystem.getLocal(conf).open(localMetaSplit);
    TaskSplitIndex splitIndex = new TaskSplitIndex();
    splitIndex.readFields(splitFile);
    splitFile.close();
    Task task =
      new MapTask(jobFilename.toString(), taskId, partition, splitIndex, 1);
    task.setConf(conf);
    task.run(conf, new FakeUmbilical());
View Full Code Here

Examples of org.apache.hadoop.mapreduce.task.reduce.ShuffleHeader.readFields()

    conn.connect();
    DataInputStream input = new DataInputStream(conn.getInputStream());
    Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
    Assert.assertEquals("close", conn.getHeaderField(HttpHeaders.CONNECTION));
    ShuffleHeader header = new ShuffleHeader();
    header.readFields(input);
    input.close();

    shuffleHandler.stop();
    Assert.assertTrue("sendError called when client closed connection",
        failures.size() == 0);
View Full Code Here

Examples of org.apache.hadoop.mapreduce.v2.api.MRDelegationTokenIdentifier.readFields()

    long renewDate;
    byte[] tokenData = readFile(tokenFile, numTokenFileBytes);
    DataInputStream in =
        new DataInputStream(new ByteArrayInputStream(tokenData));
    try {
      tokenId.readFields(in);
      renewDate = in.readLong();
    } finally {
      IOUtils.cleanup(LOG, in);
    }
    state.tokenState.put(tokenId, renewDate);
View Full Code Here

Examples of org.apache.hadoop.security.Credentials.readFields()

      URLConnection connection = remoteURL.openConnection();

      InputStream in = connection.getInputStream();
      Credentials ts = new Credentials();
      dis = new DataInputStream(in);
      ts.readFields(dis);
      return ts;
    } catch (Exception e) {
      throw new IOException("Unable to obtain remote token", e);
    } finally {
      if(dis != null) dis.close();
View Full Code Here

Examples of org.apache.hadoop.security.TokenStorage.readFields()

      URLConnection connection = remoteURL.openConnection();
     
      InputStream in = connection.getInputStream();
      TokenStorage ts = new TokenStorage();
      dis = new DataInputStream(in);
      ts.readFields(dis);
      file = new DataOutputStream(new FileOutputStream(filename));
      ts.write(file);
      file.flush();
      System.out.println("Successfully wrote token of " + file.size()
          + " bytes  to " + filename);
View Full Code Here

Examples of org.apache.hadoop.security.authorize.AccessControlList.readFields()

    // De-serialize the job's ACLs
    int numACLs = in.readInt();
    for (int i = 0; i < numACLs; i++) {
      JobACL aclType = WritableUtils.readEnum(in, JobACL.class);
      AccessControlList acl = new AccessControlList(" ");
      acl.readFields(in);
      this.jobACLs.put(aclType, acl);
    }
  }

  // A utility to convert new job runstates to the old ones.
View Full Code Here

Examples of org.apache.hadoop.security.token.TokenIdentifier.readFields()

      Token<? extends TokenIdentifier> token,
      Class<? extends TokenIdentifier> cls) throws IOException {
    TokenIdentifier tokenIdentifier = ReflectionUtils.newInstance(cls, null);
    ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier());
    DataInputStream in = new DataInputStream(buf);
    tokenIdentifier.readFields(in);
    in.close();
    return tokenIdentifier;
  }

  /** Factory for {@link SaslServerHandler} */
 
View Full Code Here

Examples of org.apache.hadoop.security.token.delegation.DelegationKey.readFields()

   */
  private synchronized void loadAllKeys(DataInputStream in) throws IOException {
    int numberOfKeys = in.readInt();
    for (int i = 0; i < numberOfKeys; i++) {
      DelegationKey value = new DelegationKey();
      value.readFields(in);
      addKey(value);
    }
  }

  /**
 
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.