Examples of UnixUserGroupInformation


Examples of org.apache.hadoop.security.UnixUserGroupInformation

  /** Get {@link UserGroupInformation} from request */
  protected UnixUserGroupInformation getUGI(HttpServletRequest request) {
    String ugi = request.getParameter("ugi");
    try {
      return new UnixUserGroupInformation(ugi.split(","));
    } catch (Exception e) {
      LOG.warn("Invalid ugi (= " + ugi + ")");
    }
    return JspHelper.webUGI;
  }
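The "ugi" request parameter is a comma-separated list: the user name first, then group names, matching the ugi.split(",") above. A minimal sketch of how a client might build such a request URL (the host, port, servlet path, and names here are hypothetical, not taken from the snippet):

  import java.io.UnsupportedEncodingException;
  import java.net.URLEncoder;

  public class UgiParamSketch {
    public static void main(String[] args) throws UnsupportedEncodingException {
      // [user, group1, group2, ...] joined on commas.
      String ugiParam = "hadoopuser,supergroup";
      // Hypothetical NameNode HTTP address and servlet path.
      String url = "http://namenode:50070/listPaths/?ugi="
          + URLEncoder.encode(ugiParam, "UTF-8");
      System.out.println(url);
    }
  }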

Examples of org.apache.hadoop.security.UnixUserGroupInformation

   *   </listing>
   * }
   */
  public void doGet(HttpServletRequest request, HttpServletResponse response)
    throws ServletException, IOException {
    final UnixUserGroupInformation ugi = getUGI(request);
    final PrintWriter out = response.getWriter();
    final XMLOutputter doc = new XMLOutputter(out, "UTF-8");
    try {
      final Map<String, String> root = buildRoot(request, doc);
      final String path = root.get("path");
      // ... (remainder of the method omitted in the original snippet)

Examples of org.apache.hadoop.security.UnixUserGroupInformation

   * GET http://<nn>:<port>/data[/<path>] HTTP/1.1
   * }
   */
  public void doGet(HttpServletRequest request, HttpServletResponse response)
    throws IOException {
    final UnixUserGroupInformation ugi = getUGI(request);
    final ClientProtocol nnproxy = createNameNodeProxy(ugi);

    try {
      final String path = request.getPathInfo() != null
        ? request.getPathInfo() : "/";
      // ... (remainder of the method omitted in the original snippet)

Examples of org.apache.hadoop.security.UnixUserGroupInformation

    private static final long serialVersionUID = 1L;
   
    /** {@inheritDoc} */
    public void doGet(HttpServletRequest request, HttpServletResponse response
        ) throws ServletException, IOException {
      final UnixUserGroupInformation ugi = getUGI(request);
      final PrintWriter out = response.getWriter();
      final String filename = getFilename(request, response);
      final XMLOutputter xml = new XMLOutputter(out, "UTF-8");
      xml.declaration();

      // ... (remainder of the method omitted in the original snippet)
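The XMLOutputter used by these servlets comes from the xmlenc library (org.znerd.xmlenc). A minimal sketch of the emit pattern, assuming the basic xmlenc API; the tag and attribute names are illustrative:

  import java.io.PrintWriter;
  import org.znerd.xmlenc.XMLOutputter;

  public class XmlSketch {
    public static void main(String[] args) throws Exception {
      PrintWriter out = new PrintWriter(System.out);
      XMLOutputter xml = new XMLOutputter(out, "UTF-8");
      xml.declaration();               // emits <?xml version="1.0" encoding="UTF-8"?>
      xml.startTag("listing");         // illustrative tag name
      xml.attribute("path", "/");      // illustrative attribute
      xml.endTag();
      xml.endDocument();
      out.flush();
    }
  }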

Examples of org.apache.hadoop.security.UnixUserGroupInformation

    return 0;
  }

  private static UnixUserGroupInformation getUGI(Configuration conf)
  throws IOException {
    UnixUserGroupInformation ugi = null;
    try {
      ugi = UnixUserGroupInformation.login(conf, true);
    } catch (LoginException e) {
      throw (IOException)(new IOException(
          "Failed to get the current user's information.").initCause(e));
    }
    return ugi;
  }
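A minimal sketch of calling the underlying API directly, assuming the pre-Kerberos Hadoop behavior that login(conf, true) also saves the resulting UGI into the Configuration (under "hadoop.job.ugi"); the class name is illustrative:

  import java.io.IOException;
  import javax.security.auth.login.LoginException;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.security.UnixUserGroupInformation;

  public class LoginSketch {
    public static void main(String[] args) throws IOException {
      Configuration conf = new Configuration();
      try {
        // save=true: also record the UGI in conf so later code can read it back.
        UnixUserGroupInformation ugi = UnixUserGroupInformation.login(conf, true);
        System.out.println("user = " + ugi.getUserName());
      } catch (LoginException e) {
        // Same wrapping idiom as the helper above.
        throw (IOException) new IOException(
            "Failed to get the current user's information.").initCause(e);
      }
    }
  }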

Examples of org.apache.hadoop.security.UnixUserGroupInformation

   * accessed using this user's id
   * @param job
   * @throws IOException
   */
  private void configureUserName(JobConf job) throws IOException {
    UnixUserGroupInformation ugi = getUGI(job);
    // Set the user's name, group and working directory
    job.setUser(ugi.getUserName());
    if (ugi.getGroupNames() != null && ugi.getGroupNames().length > 0) {
      job.set("group.name", ugi.getGroupNames()[0]);
    }
    if (job.getWorkingDirectory() == null) {
      job.setWorkingDirectory(fs.getWorkingDirectory());
    }
  }
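After configureUserName runs, the JobConf carries the submitting user's identity. A small sketch of reading it back (the values here are illustrative; "group.name" is the key the snippet itself sets):

  import org.apache.hadoop.mapred.JobConf;

  public class ReadUserSketch {
    public static void main(String[] args) {
      JobConf job = new JobConf();
      job.setUser("hadoopuser");           // what configureUserName sets via ugi.getUserName()
      job.set("group.name", "supergroup"); // first group, when the UGI has any
      System.out.println(job.getUser() + " / " + job.get("group.name"));
    }
  }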

Examples of org.apache.hadoop.security.UnixUserGroupInformation

   
    /*
     * set this user's id in job configuration, so later job files can be
     * accessed using this user's id
     */
    UnixUserGroupInformation ugi = getUGI(job);
     
    //
    // Figure out what fs the JobTracker is using.  Copy the
    // job to it, under a temporary name.  This allows DFS to work,
    // and under the local fs also provides UNIX-like object loading
    // semantics.  (that is, if the job file is deleted right after
    // submission, we can still run the submission to completion)
    //

    // Create a number of filenames in the JobTracker's fs namespace
    FileSystem fs = getFs();
    LOG.debug("default FileSystem: " + fs.getUri());
    fs.delete(submitJobDir, true);
    submitJobDir = fs.makeQualified(submitJobDir);
    submitJobDir = new Path(submitJobDir.toUri().getPath());
    FsPermission mapredSysPerms = new FsPermission(JOB_DIR_PERMISSION);
    FileSystem.mkdirs(fs, submitJobDir, mapredSysPerms);
    Path filesDir = new Path(submitJobDir, "files");
    Path archivesDir = new Path(submitJobDir, "archives");
    Path libjarsDir = new Path(submitJobDir, "libjars");
    short replication = (short)job.getInt("mapred.submit.replication", 10);
    // Add all the command-line files, jars and archives:
    // first copy them to the JobTracker's filesystem.
   
    if (files != null) {
      FileSystem.mkdirs(fs, filesDir, mapredSysPerms);
      String[] fileArr = files.split(",");
      for (String tmpFile: fileArr) {
        Path tmp = new Path(tmpFile);
        Path newPath = copyRemoteFiles(fs,filesDir, tmp, job, replication);
        try {
          URI pathURI = new URI(newPath.toUri().toString() + "#" + newPath.getName());
          DistributedCache.addCacheFile(pathURI, job);
        } catch(URISyntaxException ue) {
          // should not throw a URISyntaxException
          throw new IOException("Failed to create uri for " + tmpFile);
        }
        DistributedCache.createSymlink(job);
      }
    }
   
    if (libjars != null) {
      FileSystem.mkdirs(fs, libjarsDir, mapredSysPerms);
      String[] libjarsArr = libjars.split(",");
      for (String tmpjars: libjarsArr) {
        Path tmp = new Path(tmpjars);
        Path newPath = copyRemoteFiles(fs, libjarsDir, tmp, job, replication);
        DistributedCache.addArchiveToClassPath(newPath, job);
      }
    }
   
   
    if (archives != null) {
     FileSystem.mkdirs(fs, archivesDir, mapredSysPerms);
     String[] archivesArr = archives.split(",");
     for (String tmpArchives: archivesArr) {
       Path tmp = new Path(tmpArchives);
       Path newPath = copyRemoteFiles(fs, archivesDir, tmp, job, replication);
       try {
         URI pathURI = new URI(newPath.toUri().toString() + "#" + newPath.getName());
         DistributedCache.addCacheArchive(pathURI, job);
       } catch(URISyntaxException ue) {
         // should not throw a URISyntaxException
         throw new IOException("Failed to create uri for " + tmpArchives);
       }
       DistributedCache.createSymlink(job);
     }
    }
   
    //  set the timestamps of the archives and files
    URI[] tarchives = DistributedCache.getCacheArchives(job);
    if (tarchives != null) {
      StringBuffer archiveTimestamps =
        new StringBuffer(String.valueOf(DistributedCache.getTimestamp(job, tarchives[0])));
      for (int i = 1; i < tarchives.length; i++) {
        archiveTimestamps.append(",");
        archiveTimestamps.append(String.valueOf(DistributedCache.getTimestamp(job, tarchives[i])));
      }
      DistributedCache.setArchiveTimestamps(job, archiveTimestamps.toString());
    }

    URI[] tfiles = DistributedCache.getCacheFiles(job);
    if (tfiles != null) {
      StringBuffer fileTimestamps =
        new StringBuffer(String.valueOf(DistributedCache.getTimestamp(job, tfiles[0])));
      for (int i = 1; i < tfiles.length; i++) {
        fileTimestamps.append(",");
        fileTimestamps.append(String.valueOf(DistributedCache.getTimestamp(job, tfiles[i])));
      }
      DistributedCache.setFileTimestamps(job, fileTimestamps.toString());
    }
      
    String originalJarPath = job.getJar();

    if (originalJarPath != null) {           // copy jar to JobTracker's fs
      // use jar name if job is not named.
      if ("".equals(job.getJobName())){
        job.setJobName(new Path(originalJarPath).getName());
      }
      job.setJar(submitJarFile.toString());
      fs.copyFromLocalFile(new Path(originalJarPath), submitJarFile);
      fs.setReplication(submitJarFile, replication);
      fs.setPermission(submitJarFile, new FsPermission(JOB_FILE_PERMISSION));
    } else {
      LOG.warn("No job jar file set.  User classes may not be found. "+
               "See JobConf(Class) or JobConf#setJar(String).");
    }

    // Set the user's name and working directory
    job.setUser(ugi.getUserName());
    if (ugi.getGroupNames().length > 0) {
      job.set("group.name", ugi.getGroupNames()[0]);
    }
    if (job.getWorkingDirectory() == null) {
      job.setWorkingDirectory(fs.getWorkingDirectory());         
    }

    // ... (remainder of the method omitted in the original snippet)
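The "#" fragment appended to each cache URI above names the symlink that DistributedCache.createSymlink makes visible in the task's working directory. A small sketch of the convention (the HDFS path is hypothetical):

  import java.net.URI;
  import java.net.URISyntaxException;

  public class CacheUriSketch {
    public static void main(String[] args) throws URISyntaxException {
      // "#data.txt" names the task-local symlink for the cached file.
      URI cached = new URI("hdfs://namenode:9000/user/me/files/data.txt#data.txt");
      System.out.println("symlink name: " + cached.getFragment());
    }
  }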

Examples of org.apache.hadoop.security.UnixUserGroupInformation

    }

  }

  private UnixUserGroupInformation getUGI(Configuration job) throws IOException {
    UnixUserGroupInformation ugi = null;
    try {
      ugi = UnixUserGroupInformation.login(job, true);
    } catch (LoginException e) {
      throw (IOException)(new IOException(
          "Failed to get the current user's information.").initCause(e));
    }
    return ugi;
  }

Examples of org.apache.hadoop.security.UnixUserGroupInformation

    Configuration c2 = new Configuration(c);
    String username = UserGroupInformation.getCurrentUGI().getUserName() +
      differentiatingSuffix;
    UnixUserGroupInformation.saveToConf(c2,
      UnixUserGroupInformation.UGI_PROPERTY_NAME,
      new UnixUserGroupInformation(username, new String[]{"supergroup"}));
    return c2;
  }

Examples of org.apache.hadoop.security.UnixUserGroupInformation

      // code is taken from the AppendTestUtil over in hdfs.
      Configuration c2 = new Configuration(c);
      String username = UGI.getUserName() + ".hrs." + index++;
      UnixUserGroupInformation.saveToConf(c2,
        UnixUserGroupInformation.UGI_PROPERTY_NAME,
        new UnixUserGroupInformation(username, new String[]{"supergroup"}));
      return c2;
    }
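Both test snippets follow the same pattern: copy the Configuration, save a synthetic UGI under hadoop.job.ugi, and use the copy to act as a different user. A minimal sketch of that pattern, assuming pre-Kerberos Hadoop semantics (the user and group names are illustrative):

  import java.io.IOException;
  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.security.UnixUserGroupInformation;

  public class ImpersonationSketch {
    /** Returns a copy of c whose clients act as the given user. */
    public static Configuration asUser(Configuration c, String user)
        throws IOException {
      Configuration c2 = new Configuration(c);
      UnixUserGroupInformation.saveToConf(c2,
          UnixUserGroupInformation.UGI_PROPERTY_NAME,
          new UnixUserGroupInformation(user, new String[]{"supergroup"}));
      return c2;
    }
  }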