Package org.apache.airavata.gfac.provider

Examples of org.apache.airavata.gfac.provider.GFacProviderException
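Most of the snippets below follow the same basic pattern: a provider catches a lower-level failure and re-throws it as GFacProviderException, wrapping the original cause and, where one is available, attaching the JobExecutionContext. The following is a minimal sketch of that pattern only, not Airavata code: SampleProvider and stageInputFiles are hypothetical names, and the import paths are assumed from the module layout these examples come from.

import java.io.IOException;

import org.apache.airavata.gfac.context.JobExecutionContext;
import org.apache.airavata.gfac.provider.GFacProviderException;

public class SampleProvider {

    public void execute(JobExecutionContext jobExecutionContext) throws GFacProviderException {
        try {
            stageInputFiles(jobExecutionContext);
        } catch (IOException e) {
            // Wrap the low-level cause and attach the execution context, mirroring the
            // (message, cause, jobExecutionContext) constructor used in the examples below.
            throw new GFacProviderException("Cannot stage input files: " + e.getMessage(), e, jobExecutionContext);
        }
    }

    private void stageInputFiles(JobExecutionContext context) throws IOException {
        // Placeholder for provider-specific staging logic.
    }
}

The other constructor variants that appear in the snippets below are (message), (message, cause) and (message, jobExecutionContext).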


            String stdOutStr = GFacUtils.readFileToString(app.getStandardOutput());
            String stdErrStr = GFacUtils.readFileToString(app.getStandardError());
            Map<String, Object> output = jobExecutionContext.getOutMessageContext().getParameters();
            OutputUtils.fillOutputFromStdout(output, stdOutStr, stdErrStr);
        } catch (XmlException e) {
            throw new GFacProviderException("Cannot read output: " + e.getMessage(), e, jobExecutionContext);
        } catch (IOException io) {
            throw new GFacProviderException(io.getMessage(), io, jobExecutionContext);
        } catch (Exception e) {
            throw new GFacProviderException("Error in retrieving results", e, jobExecutionContext);
        }
    }
View Full Code Here


                    }

                }
            } catch (URISyntaxException e) {
                log.error(e.getMessage());
                throw new GFacProviderException(e.getMessage(), e, jobExecutionContext);
            } catch (ToolsException e) {
                log.error(e.getMessage());
                throw new GFacProviderException(e.getMessage(), e, jobExecutionContext);
            }
            outputNew.getParameters().put(paramName, actualParameter);
        }
        jobExecutionContext.setOutMessageContext(outputNew);
    }
View Full Code Here

                    if (cpuCount > 0) {
                        app.setCpuCount(cpuCount);
                    }
                } catch (NullPointerException e) {
                    log.debug("No Value sent in WorkflowContextHeader for CPU Count, value in the Deployment Descriptor will be used");
                }
                try {
                    int nodeCount = currentContextHeader.getWorkflowSchedulingContext().getApplicationSchedulingContextArray()[0].getNodeCount();
                    if (nodeCount > 0) {
                        app.setNodeCount(nodeCount);
                    }
                } catch (NullPointerException e) {
                    log.debug("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used");
                }
                try {
                    String queueName = currentContextHeader.getWorkflowSchedulingContext().getApplicationSchedulingContextArray()[0].getQueueName();
                    if (queueName != null) {
                        if (app.getQueue() == null) {
                            QueueType queueType = app.addNewQueue();
                            queueType.setQueueName(queueName);
                        } else {
                            app.getQueue().setQueueName(queueName);
                        }
                    }
                } catch (NullPointerException e) {
                    log.debug("No Value sent in WorkflowContextHeader for Queue Name, value in the Deployment Descriptor will be used");
                }
                try {
                    int maxWallTime = currentContextHeader.getWorkflowSchedulingContext().getApplicationSchedulingContextArray()[0].getMaxWallTime();
                    if (maxWallTime > 0) {
                        app.setMaxWallTime(maxWallTime);
                    }
                } catch (NullPointerException e) {
                    log.debug("No Value sent in WorkflowContextHeader for Max Wall Time, value in the Deployment Descriptor will be used");
                }
            }
        }
//        if(currentContextHeader.getWorkflowOutputDataHandling() != null){
//            if(currentContextHeader.getWorkflowOutputDataHandling().getApplicationOutputDataHandlingArray().length != 0)
View Full Code Here
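The scheduling block above relies on catching NullPointerException to detect a missing header value. As a sketch only, reusing the currentContextHeader, app and log names and the accessors already shown above, one of those lookups could instead use explicit checks and keep the deployment descriptor value as the fallback:

// A sketch only (not Airavata code): explicit checks instead of catching
// NullPointerException, with the deployment descriptor value as the fallback.
if (currentContextHeader.getWorkflowSchedulingContext() != null
        && currentContextHeader.getWorkflowSchedulingContext()
                .getApplicationSchedulingContextArray().length > 0) {
    int nodeCount = currentContextHeader.getWorkflowSchedulingContext()
            .getApplicationSchedulingContextArray()[0].getNodeCount();
    if (nodeCount > 0) {
        app.setNodeCount(nodeCount);
    }
} else {
    log.debug("No Value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used");
}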

      File runscript = createShellScript(jobExecutionContext);
      SCPFileTransfer fileTransfer = securityContext.getSSHClient().newSCPFileTransfer();
      GFacUtils.updateApplicationJobStatus(jobExecutionContext, jobID, ApplicationJobStatus.STAGING);
      fileTransfer.upload(runscript.getAbsolutePath(), remoteFile);
    } catch (IOException e) {
      throw new GFacProviderException(e.getLocalizedMessage(), e);
    }
  }
View Full Code Here

        log.info("Process finished with return value of zero.");
      }
     
      GFacUtils.updateApplicationJobStatus(jobExecutionContext, jobID, ApplicationJobStatus.FINISHED);
    } catch (ConnectionException e) {
      throw new GFacProviderException(e.getMessage(), e);
    } catch (TransportException e) {
      throw new GFacProviderException(e.getMessage(), e);
    } catch (IOException e) {
      throw new GFacProviderException(e.getMessage(), e);
    } finally {
      securityContext.closeSession(session);
    }

  }
View Full Code Here

            isWhirrBasedDeployment = true;
        } else {
            String hadoopConfigDirPath = (String)inMessageContext.getParameter("HADOOP_CONFIG_DIR");
            File hadoopConfigDir = new File(hadoopConfigDirPath);
            if (!hadoopConfigDir.exists()) {
                throw new GFacProviderException("Specified Hadoop configuration directory does not exist.");
            } else if (FileUtils.listFiles(hadoopConfigDir, null, null).isEmpty()) {
                throw new GFacProviderException("Cannot find any Hadoop configuration files inside the specified directory.");
            }

            this.hadoopConfigDir = hadoopConfigDir;
        }
    }
View Full Code Here

                }
            }
        } catch (Exception e) {
            String errMessage = "Error occurred during Map-Reduce job execution.";
            logger.error(errMessage, e);
            throw new GFacProviderException(errMessage, e);
        }
    }
View Full Code Here

            log.debug("RSL = " + rsl);
            GramJob job = new GramJob(rsl);
            return job;
        } catch (ToolsException te) {
            throw new GFacProviderException(te.getMessage(), te, jobExecutionContext);
        }
    }
View Full Code Here

                    log.info("Invalid script reply received. Re-submitting job, id - " + job.getIDAsString());
                    try {
                        reSubmitJob(gateKeeper, jobExecutionContext, host);
                    } catch (GFacException e) {
                        throw new GFacProviderException(
                                "Error during re-submission. Original job submission data - " + errorMsg, e);
                    }
                    return;
                }

            } else if (listener.getError() == GRAMProtocolErrorConstants.ERROR_AUTHORIZATION) {

                // re-submit with renewed credentials
                if (!authorisationFailedAttempt) {
                    authorisationFailedAttempt = true;
                    log.info("Authorisation error contacting provider. Re-submitting job with renewed credentials.");

                    try {
                        renewCredentials(jobExecutionContext);
                        reSubmitJob(gateKeeper, jobExecutionContext, host);
                    } catch (GFacException e) {
                        throw new GFacProviderException(
                                "Error during re-submission. Original job submission data - " + errorMsg, e);
                    }

                    return;
                }
View Full Code Here

            if (jobExecutionContext.getSecurityContext(AmazonSecurityContext.AMAZON_SECURITY_CONTEXT)
                    instanceof AmazonSecurityContext) {
                this.amazonSecurityContext = (AmazonSecurityContext) jobExecutionContext.
                        getSecurityContext(AmazonSecurityContext.AMAZON_SECURITY_CONTEXT);
            } else {
                throw new GFacProviderException("Amazon Security Context is not set" + jobExecutionContext);
            }
        } else {
            throw new GFacProviderException("Job Execution Context is null" + jobExecutionContext);
        }

        if (log.isDebugEnabled()) {
            log.debug("ACCESS_KEY:" + amazonSecurityContext.getAccessKey());
            log.debug("SECRET_KEY:" + amazonSecurityContext.getSecretKey());
            log.debug("AMI_ID:" + amazonSecurityContext.getAmiId());
            log.debug("INS_ID:" + amazonSecurityContext.getInstanceId());
            log.debug("INS_TYPE:" + amazonSecurityContext.getInstanceType());
            log.debug("USERNAME:" + amazonSecurityContext.getUserName());
        }
        saveApplicationJob(jobExecutionContext);
//        job
        /* Validation */
        if (amazonSecurityContext.getAccessKey() == null || amazonSecurityContext.getAccessKey().isEmpty())
            throw new GFacProviderException("EC2 Access Key is empty", jobExecutionContext);
        if (amazonSecurityContext.getSecretKey() == null || amazonSecurityContext.getSecretKey().isEmpty())
            throw new GFacProviderException("EC2 Secret Key is empty", jobExecutionContext);
        if ((amazonSecurityContext.getAmiId() == null && amazonSecurityContext.getInstanceId() == null) ||
                (amazonSecurityContext.getAmiId() != null && amazonSecurityContext.getAmiId().isEmpty()) ||
                (amazonSecurityContext.getInstanceId() != null && amazonSecurityContext.getInstanceId().isEmpty()))
            throw new GFacProviderException("EC2 AMI or Instance ID is empty", jobExecutionContext);
        if (amazonSecurityContext.getUserName() == null || amazonSecurityContext.getUserName().isEmpty())
            throw new GFacProviderException("EC2 Username is empty", jobExecutionContext);

        /* Need to start EC2 instance before running it */
        AWSCredentials credential =
                new BasicAWSCredentials(amazonSecurityContext.getAccessKey(), amazonSecurityContext.getSecretKey());
        AmazonEC2Client ec2client = new AmazonEC2Client(credential);
View Full Code Here
