Package org.hive2hive.core.processes.framework.exceptions

Examples of org.hive2hive.core.processes.framework.exceptions.ProcessExecutionException
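
All of the snippets on this page follow the same pattern: a process step's doExecute() detects a failure (a missing context object, a failed network operation, an I/O error, ...) and wraps it in a ProcessExecutionException so the process framework can abort and roll back the running process. The skeleton below is a minimal sketch of that pattern, not code taken from the Hive2Hive sources; the step name, its helper method and the import path for ProcessStep are assumptions made for illustration.

  import java.io.IOException;

  import org.hive2hive.core.processes.framework.abstracts.ProcessStep;
  import org.hive2hive.core.processes.framework.exceptions.InvalidProcessStateException;
  import org.hive2hive.core.processes.framework.exceptions.ProcessExecutionException;

  // Hypothetical step, shown only to illustrate how ProcessExecutionException is used.
  public class ExampleStep extends ProcessStep {

    @Override
    protected void doExecute() throws InvalidProcessStateException, ProcessExecutionException {
      try {
        readSomeFile(); // hypothetical helper doing the actual work
      } catch (IOException e) {
        // wrap the checked exception and keep the cause, so the framework can log
        // the full stack trace and roll the process back
        throw new ProcessExecutionException("Reading the file failed.", e);
      }
    }

    private void readSomeFile() throws IOException {
      // placeholder for the real work (DHT put/get, encryption, file I/O, ...)
    }
  }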


      DownloadTaskDirect task = new DownloadTaskDirect(metaFile.getMetaChunks(), destination,
          metaFile.getId(), session.getUserId(), ownPeerAddress, users);
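      // submit the task to the download manager and block (join) until it has completed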
      session.getDownloadManager().submit(task);
      task.join();
    } catch (InterruptedException e) {
      throw new ProcessExecutionException(e);
    }

  }
View Full Code Here


  }

  @Override
  protected void doExecute() throws InvalidProcessStateException, ProcessExecutionException {
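    // precondition checks: the meta file, the protection keys and the encrypted meta
    // file must all be present in the context before anything is removed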
    if (context.consumeMetaFile() == null) {
      throw new ProcessExecutionException("No meta file given.");
    }
    if (context.consumeProtectionKeys() == null) {
      throw new ProcessExecutionException("No protection keys given.");
    }
    if (context.consumeEncryptedMetaFile() == null) {
      throw new ProcessExecutionException("No encrypted meta file given.");
    }

    try {
      remove(context.consumeMetaFile().getId(), H2HConstants.META_FILE, context.consumeProtectionKeys());
    } catch (RemoveFailedException e) {
      throw new ProcessExecutionException("Remove of meta document failed.", e);
    }
  }
View Full Code Here

    UserProfile userProfile = null;
    try {
      // fetch user profile (only read)
      userProfile = profileManager.getUserProfile(getID(), false);
    } catch (GetFailedException e) {
      throw new ProcessExecutionException(e);
    }

    // find the parent node using the relative path to navigate there
    FolderIndex parentNode = (FolderIndex) userProfile.getFileByPath(file.getParentFile(), root);

    // validate the write protection
    if (!parentNode.canWrite()) {
      throw new ProcessExecutionException(String.format(
          "This directory '%s' is write protected (and we don't have the keys).", file
              .getParentFile().getName()));
    }
   
    // provide the content protection keys
View Full Code Here

    // validate the file size
    if (BigInteger.valueOf(FileUtil.getFileSize(file)).compareTo(config.getMaxFileSize()) > 0) {
      logger.debug("File {} is a 'large file'.", file.getName());
      if (!allowLargeFile) {
        throw new ProcessExecutionException("Large files are not allowed (" + file.getName() + ").");
      }
      context.setLargeFile(true);
    } else {
      logger.debug("File {} is a 'small file'.", file.getName());
      context.setLargeFile(false);
View Full Code Here

    Chunk chunk;
    try {
      chunk = FileChunkUtil.getChunk(file, config.getChunkSize(), index, chunkId);
    } catch (IOException e) {
      logger.error("File {}: Could not read the file.", file.getAbsolutePath());
      throw new ProcessExecutionException("File " + file.getAbsolutePath()
          + ": Could not read the file", e);
    }

    if (chunk != null) {
      try {
        // encrypt the chunk prior to put such that nobody can read it
        HybridEncryptedContent encryptedContent = H2HEncryptionUtil.encryptHybrid(chunk, context
            .consumeChunkKeys().getPublic());

        logger.debug("Uploading chunk {} of file {}.", chunk.getOrder(), file.getName());
        Parameters parameters = new Parameters().setLocationKey(chunk.getId())
            .setContentKey(H2HConstants.FILE_CHUNK).setData(encryptedContent)
            .setProtectionKeys(context.consumeProtectionKeys()).setTTL(chunk.getTimeToLive());

        // data manager has to produce the hash, which gets used for signing
        parameters.setHashFlag(true);
        // put the encrypted chunk into the network
        put(parameters);

        // store the hash in the index of the meta file
        context.getMetaChunks().add(new MetaChunk(chunkId, parameters.getHash(), index));
      } catch (IOException | DataLengthException | InvalidKeyException | IllegalStateException
          | InvalidCipherTextException | IllegalBlockSizeException | BadPaddingException
          | PutFailedException e) {
        logger.error("Could not encrypt and put the chunk.", e);
        throw new ProcessExecutionException("Could not encrypt and put the chunk.", e);
      }
    }
  }
View Full Code Here

  protected void doExecute() throws InvalidProcessStateException, ProcessExecutionException {
    UserProfileManager profileManager;
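    // the profile manager can only be obtained while a session exists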
    try {
      profileManager = networkManager.getSession().getProfileManager();
    } catch (NoSessionException e) {
      throw new ProcessExecutionException(e);
    }

    UserProfile profile = null;
    try {
      profile = profileManager.getUserProfile(getID(), false);
    } catch (GetFailedException e) {
      throw new ProcessExecutionException("User profile could not be loaded.");
    }

    FileSynchronizer synchronizer;
    try {
      synchronizer = new FileSynchronizer(networkManager.getSession().getRoot(), profile);
    } catch (ClassNotFoundException | NoSessionException | IOException e) {
      throw new ProcessExecutionException("FileSynchronizer could not be instantiated.", e);
    }
    try {
      synchronizeFiles(synchronizer);
    } catch (NoSessionException | NoPeerConnectionException e) {
      throw new ProcessExecutionException(e);
    }

    if (context.getIsInitial()) {
      // if is initial, process the user profile queue
      logger.debug("Starting to process all user tasks.");
View Full Code Here

        return;
      }
    }

    logger.warn("No online peer found that could be contacted to get the file {}", task.getDestinationName());
    throw new ProcessExecutionException("No online peer found that could be contacted");
  }
View Full Code Here

      params.setDownloadManager(downloadManager);

      // create session
      session = new H2HSession(params);
    } catch (IOException | NoPeerConnectionException e) {
      throw new ProcessExecutionException("Session could not be created.", e);
    }

    // set session
    networkManager.setSession(session);
  }
View Full Code Here

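      // re-link the moved node under its new parent in the user profile tree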
      movedNode.setParent(newParent);
      newParent.addChild(movedNode);

      // validate
      if (!oldParent.canWrite()) {
        throw new ProcessExecutionException("No write access to the source directory");
      } else if (!newParent.canWrite()) {
        throw new ProcessExecutionException("No write access to the destination directory");
      }

      // update in DHT
      profileManager.readyToPut(userProfile, getID());
      profileUpdated = true;
      logger.debug("Successfully relinked the moved file in the user profile.");

      // check if the protection key needs to be updated
      if (!H2HEncryptionUtil.compare(oldParent.getProtectionKeys(), newParent.getProtectionKeys())) {
        // update the protection key of the meta file and eventually all chunks
        logger.info("Required to update the protection key of the moved file(s)/folder(s).");
        initPKUpdateStep(movedNode, oldParent.getProtectionKeys(), newParent.getProtectionKeys());
      }

      // notify other users
      initNotificationParameters(oldParent.getCalculatedUserList(), movedNode);

    } catch (NoSessionException | GetFailedException | PutFailedException | NoPeerConnectionException e) {
      throw new ProcessExecutionException(e);
    }
  }
View Full Code Here

      // store the hash
      context.provideHash(parameters.getHash());

    } catch (IOException | DataLengthException | InvalidKeyException | IllegalStateException
        | InvalidCipherTextException | IllegalBlockSizeException | BadPaddingException e) {
      throw new ProcessExecutionException("Meta file could not be encrypted.", e);
    } catch (PutFailedException e) {
      throw new ProcessExecutionException(e);
    }
  }
View Full Code Here


