Examples of DFSClient


Examples of org.apache.hadoop.hdfs.DFSClient

    if (!noLink)
      out.print("<input type=\"hidden\" name=\"referrer\" value=\"" + referrer
          + "\">");

    // find the last block of this file and a datanode that holds it
    final DFSClient dfs = getDFSClient(ugi, nnAddr, conf);
    List<LocatedBlock> blocks = dfs.getNamenode().getBlockLocations(filename, 0,
        Long.MAX_VALUE).getLocatedBlocks();
    if (blocks == null || blocks.size() == 0) {
      out.print("No datanodes contain blocks of file " + filename);
      dfs.close();
      return;
    }
    LocatedBlock lastBlk = blocks.get(blocks.size() - 1);
    String poolId = lastBlk.getBlock().getBlockPoolId();
    long blockSize = lastBlk.getBlock().getNumBytes();
    long blockId = lastBlk.getBlock().getBlockId();
    Token<BlockTokenIdentifier> accessToken = lastBlk.getBlockToken();
    long genStamp = lastBlk.getBlock().getGenerationStamp();
    DatanodeInfo chosenNode;
    try {
      chosenNode = JspHelper.bestNode(lastBlk, conf);
    } catch (IOException e) {
      out.print(e.toString());
      dfs.close();
      return;
    }
    InetSocketAddress addr =
      NetUtils.createSocketAddr(chosenNode.getXferAddr());
    // view the last chunkSizeToView bytes while tailing
    final long startOffset = blockSize >= chunkSizeToView ? blockSize
        - chunkSizeToView : 0;

    out.print("<textarea cols=\"100\" rows=\"25\" wrap=\"virtual\" style=\"width:100%\" READONLY>");
    JspHelper.streamBlockInAscii(addr, poolId, blockId, accessToken, genStamp,
        blockSize, startOffset, chunkSizeToView, out, conf, dfs.getDataEncryptionKey());
    out.print("</textarea>");
    dfs.close();
  }
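
The excerpt above hand-picks the datanode holding the file's last block and streams the tail through JspHelper.streamBlockInAscii. A minimal sketch of the same tail-the-last-chunk idea done directly with DFSClient and DFSInputStream; the path and chunk size here are assumptions for illustration, not values from the original source:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DFSInputStream;
import org.apache.hadoop.hdfs.server.namenode.NameNode;

public class TailSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    DFSClient dfs = new DFSClient(NameNode.getAddress(conf), conf);
    try {
      DFSInputStream in = dfs.open("/logs/app.log");   // assumed path
      long chunkSizeToView = 32 * 1024;                 // assumed tail size
      long fileLen = in.getFileLength();
      // seek to the last chunkSizeToView bytes (or to 0 for short files)
      long startOffset = fileLen >= chunkSizeToView ? fileLen - chunkSizeToView : 0;
      in.seek(startOffset);
      byte[] buf = new byte[8192];
      int n;
      while ((n = in.read(buf, 0, buf.length)) > 0) {
        System.out.write(buf, 0, n);
      }
      in.close();
    } finally {
      dfs.close();
    }
  }
}

Going through DFSInputStream leaves block location and datanode selection to the client library, which the JSP code above does explicitly via JspHelper.bestNode.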

Examples of org.apache.hadoop.hdfs.DFSClient

          HdfsServerConstants.READ_TIMEOUT);
      final SocketFactory socketFactory = NetUtils.getSocketFactory(conf,
          ClientProtocol.class);
     
      try {
        final DFSClient dfs = DatanodeJspHelper.getDFSClient(request,
            datanode, conf, getUGI(request, conf));
        final ClientProtocol nnproxy = dfs.getNamenode();
        final MD5MD5CRC32FileChecksum checksum = DFSClient.getFileChecksum(
            path, nnproxy, socketFactory, socketTimeout, dfs.getDataEncryptionKey(), false);
        MD5MD5CRC32FileChecksum.write(xml, checksum);
      } catch(IOException ioe) {
        writeXml(ioe, path, xml);
      } catch (InterruptedException e) {
        writeXml(e, path, xml);
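
The servlet excerpt uses the static DFSClient.getFileChecksum overload so it can supply an explicit NameNode proxy, socket factory, and timeout. Outside a servlet the instance method is usually enough; a minimal sketch, assuming the single-argument DFSClient#getFileChecksum(String) overload of this Hadoop line and a hypothetical path:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.server.namenode.NameNode;

public class ChecksumSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    DFSClient dfs = new DFSClient(NameNode.getAddress(conf), conf);
    try {
      // MD5-of-MD5-of-CRC32 checksum computed over the file's blocks.
      MD5MD5CRC32FileChecksum checksum = dfs.getFileChecksum("/data/file.bin"); // assumed path
      System.out.println(checksum);
    } finally {
      dfs.close();
    }
  }
}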

Examples of org.apache.hadoop.hdfs.DFSClient

    iug = new IdUserGroup();
   
    exports = NfsExports.getInstance(config);
    writeManager = new WriteManager(iug, config);
    clientCache = new DFSClientCache(config);
    superUserClient = new DFSClient(NameNode.getAddress(config), config);
    replication = (short) config.getInt(DFSConfigKeys.DFS_REPLICATION_KEY,
        DFSConfigKeys.DFS_REPLICATION_DEFAULT);
    blockSize = config.getLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY,
        DFSConfigKeys.DFS_BLOCK_SIZE_DEFAULT);
    bufferSize = config.getInt(
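
The constructor above wires a superuser DFSClient next to the per-user DFSClientCache. A minimal sketch of building and exercising such a client on its own; the root-directory probe is illustrative and not taken from the original source:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.server.namenode.NameNode;

public class SuperUserClientSketch {
  public static void main(String[] args) throws IOException {
    Configuration config = new Configuration();
    DFSClient superUserClient = new DFSClient(NameNode.getAddress(config), config);
    try {
      // Resolve the root directory's status as a basic liveness/permission check.
      HdfsFileStatus status = superUserClient.getFileInfo("/");
      System.out.println("root exists: " + (status != null && status.isDir()));
    } finally {
      superUserClient.close();
    }
  }
}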

Examples of org.apache.hadoop.hdfs.DFSClient

    if (!checkAccessPrivilege(client, AccessPrivilege.READ_ONLY)) {
      response.setStatus(Nfs3Status.NFS3ERR_ACCES);
      return response;
    }
   
    DFSClient dfsClient = clientCache.getDfsClient(securityHandler.getUser());
    if (dfsClient == null) {
      response.setStatus(Nfs3Status.NFS3ERR_SERVERFAULT);
      return response;
    }
   
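
This access-check and client-lookup prologue repeats across the NFSv3 handlers in the excerpts below. A hedged sketch of pulling the lookup into a private helper of the same handler class; the helper name is hypothetical, and it reuses the clientCache field, LOG, and SecurityHandler argument already shown in these excerpts:

  // Hypothetical helper for the repeated prologue above: returns the cached
  // per-user DFSClient, or null when the caller should answer
  // Nfs3Status.NFS3ERR_SERVERFAULT.
  private DFSClient getCachedClient(SecurityHandler securityHandler) {
    DFSClient dfsClient = clientCache.getDfsClient(securityHandler.getUser());
    if (dfsClient == null) {
      LOG.warn("No DFSClient available for user " + securityHandler.getUser());
    }
    return dfsClient;
  }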

Examples of org.apache.hadoop.hdfs.DFSClient

  @Override
  public SETATTR3Response setattr(XDR xdr, SecurityHandler securityHandler,
      InetAddress client) {
    SETATTR3Response response = new SETATTR3Response(Nfs3Status.NFS3_OK);
    DFSClient dfsClient = clientCache.getDfsClient(securityHandler.getUser());
    if (dfsClient == null) {
      response.setStatus(Nfs3Status.NFS3ERR_SERVERFAULT);
      return response;
    }
   
Examples of org.apache.hadoop.hdfs.DFSClient

    if (!checkAccessPrivilege(client, AccessPrivilege.READ_ONLY)) {
      response.setStatus(Nfs3Status.NFS3ERR_ACCES);
      return response;
    }

    DFSClient dfsClient = clientCache.getDfsClient(securityHandler.getUser());
    if (dfsClient == null) {
      response.setStatus(Nfs3Status.NFS3ERR_SERVERFAULT);
      return response;
    }

    READLINK3Request request = null;

    try {
      request = new READLINK3Request(xdr);
    } catch (IOException e) {
      LOG.error("Invalid READLINK request");
      return new READLINK3Response(Nfs3Status.NFS3ERR_INVAL);
    }

    FileHandle handle = request.getHandle();
    if (LOG.isDebugEnabled()) {
      LOG.debug("NFS READLINK fileId: " + handle.getFileId());
    }

    String fileIdPath = Nfs3Utils.getFileIdPath(handle);
    try {
      String target = dfsClient.getLinkTarget(fileIdPath);

      Nfs3FileAttributes postOpAttr = Nfs3Utils.getFileAttr(dfsClient,
          fileIdPath, iug);
      if (postOpAttr == null) {
        LOG.info("Can't get path for fileId:" + handle.getFileId());
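
READLINK resolves the symlink target with DFSClient#getLinkTarget. A minimal sketch of creating a symlink and reading it back through the same client API; the paths are assumptions, and HDFS symlink support can be disabled by configuration in some releases:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.server.namenode.NameNode;

public class ReadlinkSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    DFSClient dfs = new DFSClient(NameNode.getAddress(conf), conf);
    try {
      // Create /tmp/latest -> /data/current, then resolve the link target again.
      dfs.createSymlink("/data/current", "/tmp/latest", true);  // assumed paths
      String target = dfs.getLinkTarget("/tmp/latest");
      System.out.println("link target: " + target);
    } finally {
      dfs.close();
    }
  }
}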

Examples of org.apache.hadoop.hdfs.DFSClient

    if (!checkAccessPrivilege(client, AccessPrivilege.READ_ONLY)) {
      response.setStatus(Nfs3Status.NFS3ERR_ACCES);
      return response;
    }
   
    DFSClient dfsClient = clientCache.getDfsClient(userName);
    if (dfsClient == null) {
      response.setStatus(Nfs3Status.NFS3ERR_SERVERFAULT);
      return response;
    }
   

Examples of org.apache.hadoop.hdfs.DFSClient

  @Override
  public WRITE3Response write(XDR xdr, Channel channel, int xid,
      SecurityHandler securityHandler, InetAddress client) {
    WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3_OK);

    DFSClient dfsClient = clientCache.getDfsClient(securityHandler.getUser());
    if (dfsClient == null) {
      response.setStatus(Nfs3Status.NFS3ERR_SERVERFAULT);
      return response;
    }
   
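
The WRITE handler looks up the cached per-user client before processing the request body (truncated in this excerpt). A minimal sketch of a plain write through DFSClient itself, with an assumed path and payload:

import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.server.namenode.NameNode;

public class WriteSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    DFSClient dfs = new DFSClient(NameNode.getAddress(conf), conf);
    try {
      // Create (or overwrite) the file, write a small payload, and close it.
      OutputStream out = dfs.create("/tmp/hello.txt", true);  // assumed path
      out.write("hello, hdfs\n".getBytes(StandardCharsets.UTF_8));
      out.close();
    } finally {
      dfs.close();
    }
  }
}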