Examples of HdfsFile


Examples of com.hdfs.file.bean.HdfsFile

    long size = file.length() / 1024; // size of the uploaded file in KB
    memory.setMemoryused((int) (memory.getMemoryused() + size)); // update the used-space counter
    filedao.updateMemory(memory); // persist the updated used space

    HdfsFile newfile = new HdfsFile(); // HdfsFile object that will be written to the database
    dillResult resultReturn = new dillResult(); // dillResult instance; its purpose here is unclear

    /*
     * Look up the URL of the current directory, i.e. the target directory
     * in the HDFS file system that the file will be uploaded to.
     */
    HdfsFile dfsfile = filedao.findFile(currentId);
    String dst = dfsfile.getFileUrl(); // URL of the current directory

    upLoadAction upAction = new upLoadAction();

    boolean result = upAction.copytoDFS(file, dst, true, filename,
        safelevel);

    if (result) {
      SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
      Date newDate;
      try {
        newDate = new Date(dateFormat.parse(deadline).getTime());
        newfile.setDeadline(newDate);
      } catch (ParseException e) {
        // deadline string could not be parsed; log it and leave the deadline unset
        e.printStackTrace();
      }

      newfile.setSize(size);
      newfile.setType(fileUtil.checkType(filename));
      newfile.setFileName(filename);
      newfile.setFileUrl(dfsfile.getFileUrl() + "/" + filename);
      newfile.setParentid(dfsfile.getFileId());
      newfile.setFileId(pathToId.ParsepathToId(newfile.getFileUrl()));
      newfile.setUserId(dfsfile.getUserId());
      newfile.setCreateTime(new Date());
      newfile.setModifiedTime(new Date());
      newfile.setSafeLevel(safelevel);
      result = filedao.insertFile(newfile);
    }
View Full Code Here
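
The upLoadAction.copytoDFS call above is not shown on this page. As a rough, hypothetical sketch (not the project's actual implementation), an equivalent upload using the standard Hadoop FileSystem API could look like the following; the class name, the meaning of the boolean flag, and the omission of the safelevel parameter are assumptions:

    import java.io.File;
    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class UploadSketch {

      // Hypothetical stand-in for upLoadAction.copytoDFS: copies a local file
      // into the HDFS directory dst under the given file name.
      public boolean copyToDfs(File localFile, String dst, boolean overwrite, String filename)
          throws IOException {
        Configuration conf = new Configuration(); // reads core-site.xml / hdfs-site.xml from the classpath
        try (FileSystem fs = FileSystem.get(conf)) {
          Path target = new Path(dst, filename);
          if (!overwrite && fs.exists(target)) {
            return false; // do not overwrite an existing file
          }
          // copyFromLocalFile(delSrc, overwrite, src, dst)
          fs.copyFromLocalFile(false, overwrite, new Path(localFile.getAbsolutePath()), target);
          return fs.exists(target);
        }
      }
    }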

Examples of com.hdfs.file.bean.HdfsFile

  @Override
  public File downLoad(long fileId) throws IOException {
    /*
     * Look up the HdfsFile record for fileId in the database to get the
     * path where the file is stored in the HDFS file system.
     */
    HdfsFile dfsfile = filedao.findFile(fileId);

    String tmpFile = "tmp" + fileId; // name of the temporary file
    File dst = new File("E:/hadoop/temp/" + tmpFile); // temporary file on the local disk

    /*
     * Download the file from the HDFS file system into the temporary file
     * dst on the Tomcat server.
     */
    downLoadAction down = new downLoadAction();
    down.copyFileFromFs(dfsfile.getFileUrl(), dst, false);

    dst.deleteOnExit();
    return dst;
  }
View Full Code Here
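
downLoadAction.copyFileFromFs is likewise not shown here. A minimal, hypothetical sketch of such a download using the Hadoop FileSystem API might be (the meaning of the boolean flag is an assumption):

    import java.io.File;
    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class DownloadSketch {

      // Hypothetical stand-in for downLoadAction.copyFileFromFs: copies a file
      // from HDFS to a local destination, optionally deleting the source.
      public void copyFileFromFs(String hdfsUrl, File localDst, boolean deleteSource)
          throws IOException {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(conf)) {
          // copyToLocalFile(delSrc, src, dst)
          fs.copyToLocalFile(deleteSource, new Path(hdfsUrl), new Path(localDst.getAbsolutePath()));
        }
      }
    }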

Examples of com.hdfs.file.bean.HdfsFile

  @Override
  public void storeEncryptDataKey(long fileId, byte[] encryptedDataSecretKey) {
    /*
     * Find the HdfsFile record that corresponds to fileId.
     */
    HdfsFile hdfsFile = filedao.findFile(fileId);
    filedao.updateEncrypt_DataKey(hdfsFile, encryptedDataSecretKey);

  }
View Full Code Here
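
The encryptedDataSecretKey stored above is produced elsewhere in the project and is not shown on this page. As a hypothetical sketch of how such a key could be created with the JDK's crypto API (the AES key size and RSA padding are assumptions):

    import java.security.GeneralSecurityException;
    import java.security.PublicKey;

    import javax.crypto.Cipher;
    import javax.crypto.KeyGenerator;
    import javax.crypto.SecretKey;

    public class DataKeyWrapSketch {

      // Generates a fresh AES data key and encrypts it with an RSA public key,
      // yielding bytes that could be passed to storeEncryptDataKey.
      public byte[] generateEncryptedDataKey(PublicKey rsaPublicKey)
          throws GeneralSecurityException {
        KeyGenerator keyGen = KeyGenerator.getInstance("AES");
        keyGen.init(128); // 128-bit AES key; the size is an assumption
        SecretKey dataKey = keyGen.generateKey();

        Cipher cipher = Cipher.getInstance("RSA/ECB/PKCS1Padding"); // padding is an assumption
        cipher.init(Cipher.ENCRYPT_MODE, rsaPublicKey);
        return cipher.doFinal(dataKey.getEncoded());
      }
    }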

Examples of com.hdfs.file.bean.HdfsFile

  @Override
  public boolean isEncryptFile(long fileId) {
    HdfsFile dfsfile = filedao.findFile(fileId);
    // the file counts as encrypted if an encrypted data key is stored for it
    return dfsfile.getEncryptDataKey() != null;
  }
View Full Code Here

Examples of com.hdfs.file.bean.HdfsFile

    RSAPrivateKey pKey = (RSAPrivateKey) encrypter.loadKey(privateKeyPath,
        0);
    /*
     * Fetch the encrypted data key stored for fileId.
     */
    HdfsFile dfsfile = filedao.findFile(fileId);
    byte[] encryptDataKey = dfsfile.getEncryptDataKey();

    /*
     * The RSAEncrypter object invokes the decryption module to decrypt the
     * encrypted data key.
     */
    byte[] dataKey = encrypter.decrypt(pKey, encryptDataKey);
View Full Code Here
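
The RSAEncrypter class used above is not part of this page. Decrypting an RSA-wrapped data key can also be done with the JDK's own crypto API; the sketch below is a hypothetical equivalent, and the padding scheme is an assumption:

    import java.security.GeneralSecurityException;
    import java.security.PrivateKey;

    import javax.crypto.Cipher;

    public class DataKeyDecryptSketch {

      // Hypothetical stand-in for RSAEncrypter.decrypt: unwraps a symmetric
      // data key that was encrypted with the matching RSA public key.
      public byte[] decrypt(PrivateKey privateKey, byte[] encryptedDataKey)
          throws GeneralSecurityException {
        Cipher cipher = Cipher.getInstance("RSA/ECB/PKCS1Padding"); // padding is an assumption
        cipher.init(Cipher.DECRYPT_MODE, privateKey);
        return cipher.doFinal(encryptedDataKey); // raw bytes of the data key (e.g. an AES key)
      }
    }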

Examples of com.hdfs.file.bean.HdfsFile

  @Override
  public boolean exists(long currentId, String filename) {
    HdfsFile dfsfile = filedao.findFile(currentId);
    String dst = dfsfile.getFileUrl(); // URL of the current directory
    String filePath = dst + "/" + filename;

    upLoadAction upAction = new upLoadAction();
    // check whether a file already exists at filePath
    return upAction.fileExists(filePath);
  }
View Full Code Here
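
upLoadAction.fileExists is not shown either; a one-method hypothetical sketch using the Hadoop FileSystem API could be:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class ExistsSketch {

      // Hypothetical stand-in for upLoadAction.fileExists: checks whether a
      // path exists in HDFS.
      public boolean fileExists(String filePath) throws IOException {
        try (FileSystem fs = FileSystem.get(new Configuration())) {
          return fs.exists(new Path(filePath));
        }
      }
    }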

Examples of com.hdfs.file.bean.HdfsFile

  @Override
  public long getDeletedFileId(long currentId, String filename) {
    /*
     * Build the full path of the file that is to be deleted.
     */
    HdfsFile dfsfile = filedao.findFile(currentId);
    String dst = dfsfile.getFileUrl(); // URL of the current directory
    String filePath = dst + "/" + filename;
    return filedao.getFileId(filePath);
  }
View Full Code Here

Examples of mil.nga.giat.geowave.ingest.hdfs.HdfsFile

  @Override
  public HdfsFile[] toHdfsObjects(
      final File input ) {
    final HdfsFile hfile = new HdfsFile();
    hfile.setOriginalFilePath(input.getAbsolutePath());
    try {
      hfile.setOriginalFile(ByteBuffer.wrap(Files.readAllBytes(input.toPath())));
    }
    catch (final IOException e) {
      LOGGER.warn(
          "Unable to read GeoLife file: " + input.getAbsolutePath(),
          e);
View Full Code Here