Examples of HpcApplicationDeploymentType


Examples of org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType

public class ApplicationProcessor {
 
  public static void generateJobSpecificAppElements(JobDefinitionType value, JobExecutionContext context){
   
    HpcApplicationDeploymentType appDepType = (HpcApplicationDeploymentType) context
        .getApplicationContext().getApplicationDeploymentDescription()
        .getType();
   
    createGenericApplication(value, appDepType);
   
    if (appDepType.getApplicationEnvironmentArray().length > 0) {
      createApplicationEnvironment(value,
          appDepType.getApplicationEnvironmentArray(), appDepType);
    }

   
    if (appDepType.getExecutableLocation() != null) {
      FileNameType fNameType = FileNameType.Factory.newInstance();
      fNameType.setStringValue(appDepType.getExecutableLocation());
      if(isParallelJob(appDepType)) {
        JSDLUtils.getOrCreateSPMDApplication(value).setExecutable(fNameType);
        JSDLUtils.getSPMDApplication(value).setSPMDVariation(getSPMDVariation(appDepType));
       
        if(getValueFromMap(appDepType, JSDLUtils.NUMBEROFPROCESSES)!=null){
          NumberOfProcessesType num = NumberOfProcessesType.Factory.newInstance();
          num.setStringValue(getValueFromMap(appDepType, JSDLUtils.NUMBEROFPROCESSES));
          JSDLUtils.getSPMDApplication(value).setNumberOfProcesses(num);
        }
       
        if(getValueFromMap(appDepType, JSDLUtils.PROCESSESPERHOST)!=null){
          ProcessesPerHostType pph = ProcessesPerHostType.Factory.newInstance();
          pph.setStringValue(getValueFromMap(appDepType, JSDLUtils.PROCESSESPERHOST));
          JSDLUtils.getSPMDApplication(value).setProcessesPerHost(pph);
        }
       
        if(getValueFromMap(appDepType, JSDLUtils.THREADSPERHOST)!=null){
          ThreadsPerProcessType tpp = ThreadsPerProcessType.Factory.newInstance();
          tpp.setStringValue(getValueFromMap(appDepType, JSDLUtils.THREADSPERHOST));
          JSDLUtils.getSPMDApplication(value).setThreadsPerProcess(tpp);
        }
      } else {
        JSDLUtils.getOrCreatePOSIXApplication(value).setExecutable(fNameType);
      }
    }
   
    if(appDepType.getStandardOutput() != null) {
      String stdout = new File(appDepType.getStandardOutput()).getName();
      ApplicationProcessor.setApplicationStdOut(value, appDepType, stdout);
    }
   
    if(appDepType.getStandardError() != null) {
      String stderr = new File(appDepType.getStandardError()).getName();
      ApplicationProcessor.setApplicationStdErr(value, appDepType, stderr);
    }
   
   
   
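The method above relies on helpers that are not shown in the snippet, notably isParallelJob and getValueFromMap. A minimal sketch of how isParallelJob might look, assuming a job counts as parallel when the deployment descriptor's job type is MPI or OpenMP (an assumption, not necessarily the exact rule in Airavata's ApplicationProcessor):

  // Hypothetical sketch: treat MPI and OpenMP deployments as parallel jobs.
  // JobTypeType is the same XMLBeans enum used in the other examples on this page.
  public static boolean isParallelJob(HpcApplicationDeploymentType appDepType) {
    JobTypeType.Enum jobType = appDepType.getJobType();
    return jobType != null
        && (JobTypeType.MPI.equals(jobType) || JobTypeType.OPEN_MP.equals(jobType));
  }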

Examples of org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType

public class ResourceProcessor {

 
  public static void generateResourceElements(JobDefinitionType value, JobExecutionContext context) throws Exception{
   
    HpcApplicationDeploymentType appDepType = (HpcApplicationDeploymentType) context
        .getApplicationContext().getApplicationDeploymentDescription()
        .getType();
   
    createMemory(value, appDepType);
   
    if (appDepType.getCpuCount() > 0) {
      RangeValueType rangeType = new RangeValueType();
      rangeType.setLowerBound(Double.NaN);
      rangeType.setUpperBound(Double.NaN);
      rangeType.setExact(appDepType.getCpuCount());
      JSDLUtils.setTotalCPUCountRequirements(value, rangeType);
    }

    if (appDepType.getProcessorsPerNode() > 0) {
      RangeValueType rangeType = new RangeValueType();
      rangeType.setLowerBound(Double.NaN);
      rangeType.setUpperBound(Double.NaN);
      rangeType.setExact(appDepType.getProcessorsPerNode());
      JSDLUtils.setIndividualCPUCountRequirements(value, rangeType);
    }
   
    if (appDepType.getNodeCount() > 0) {
      RangeValueType rangeType = new RangeValueType();
      rangeType.setLowerBound(Double.NaN);
      rangeType.setUpperBound(Double.NaN);
      rangeType.setExact(appDepType.getNodeCount());
      JSDLUtils.setTotalResourceCountRequirements(value, rangeType);
    }
   
    if(appDepType.getMaxWallTime() > 0) {
      RangeValueType cpuTime = new RangeValueType();
      cpuTime.setLowerBound(Double.NaN);
      cpuTime.setUpperBound(Double.NaN);
      long wallTime = appDepType.getMaxWallTime() * 60;
      cpuTime.setExact(wallTime);
      JSDLUtils.setIndividualCPUTimeRequirements(value, cpuTime);
    }
  }
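createMemory is called at the top of this method but not shown. A sketch of how it could follow the same RangeValueType pattern, assuming a JSDLUtils.setIndividualPhysicalMemoryRequirements helper exists and that the min/max memory fields can be passed through without unit conversion (both are assumptions):

  // Hypothetical sketch: apply the RangeValueType pattern to the memory fields.
  // The JSDLUtils setter name and the unit handling are assumptions.
  protected static void createMemory(JobDefinitionType value, HpcApplicationDeploymentType appDepType) {
    if (appDepType.getMinMemory() > 0 && appDepType.getMaxMemory() > 0) {
      RangeValueType memoryRange = new RangeValueType();
      memoryRange.setLowerBound(appDepType.getMinMemory());
      memoryRange.setUpperBound(appDepType.getMaxMemory());
      JSDLUtils.setIndividualPhysicalMemoryRequirements(value, memoryRange);
    }
  }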

Examples of org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType

public class DataStagingProcessor {
 
  public static void generateDataStagingElements(JobDefinitionType value, JobExecutionContext context) throws Exception{
   
    HpcApplicationDeploymentType appDepType = (HpcApplicationDeploymentType) context
        .getApplicationContext().getApplicationDeploymentDescription()
        .getType();

   
    String gridftpEndpoint = ((UnicoreHostType) context.getApplicationContext().getHostDescription().getType())

Examples of org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType

    }

    ;

    public static GramAttributes configureRemoteJob(JobExecutionContext context) throws ToolsException {
        HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) context.getApplicationContext().getApplicationDeploymentDescription().getType();
        GramAttributes jobAttr = new GramAttributes();
        jobAttr.setExecutable(app.getExecutableLocation());
        jobAttr.setDirectory(app.getStaticWorkingDirectory());
        jobAttr.setStdout(app.getStandardOutput());
        jobAttr.setStderr(app.getStandardError());

        /*
         * The env here contains the env of the host and the application. i.e the env specified in the host description
         * and application description documents
         */
        NameValuePairType[] env = app.getApplicationEnvironmentArray();
        if (env.length != 0) {
            Map<String, String> nv = new HashMap<String, String>();
            for (int i = 0; i < env.length; i++) {
                String key = env[i].getName();
                String value = env[i].getValue();
                nv.put(key, value);
            }

            for (Map.Entry<String, String> entry : nv.entrySet()) {
                jobAttr.addEnvVariable(entry.getKey(), entry.getValue());
            }
        }
        jobAttr.addEnvVariable(Constants.INPUT_DATA_DIR_VAR_NAME, app.getInputDataDirectory());
        jobAttr.addEnvVariable(Constants.OUTPUT_DATA_DIR_VAR_NAME, app.getOutputDataDirectory());

        if (app.getMaxWallTime() > 0) {
            log.debug("Setting max wall clock time to " + app.getMaxWallTime());

            if (app.getMaxWallTime() > 30 && app.getQueue() != null && app.getQueue().getQueueName().equals("debug")) {
                throw new ToolsException("NCSA debug Queue only supports jobs < 30 minutes");
            }

            jobAttr.setMaxWallTime(app.getMaxWallTime());
            jobAttr.set("proxy_timeout", "1");
        } else {
            jobAttr.setMaxWallTime(30);
        }

        if (app.getStandardInput() != null && !"".equals(app.getStandardInput())) {
            jobAttr.setStdin(app.getStandardInput());
        } else {
            MessageContext input = context.getInMessageContext();
            Map<String,Object> inputs = input.getParameters();
            Set<String> keys = inputs.keySet();
            for (String paramName : keys ) {
                ActualParameter actualParameter = (ActualParameter) inputs.get(paramName);
                if ("URIArray".equals(actualParameter.getType().getType().toString())) {
                    String[] values = ((URIArrayType) actualParameter.getType()).getValueArray();
                    for (String value : values) {
                        jobAttr.addArgument(value);
                    }
                } else {
                    String paramValue = MappingFactory.toString(actualParameter);
                    jobAttr.addArgument(paramValue);
                }
            }
        }
        // Use the workflow context header values if the user provided them in the request; they override the defaults from the deployment descriptor
        //todo finish the scheduling based on workflow execution context
        ContextHeaderDocument.ContextHeader currentContextHeader = context.getContextHeader();
        if (currentContextHeader != null) {
            if (currentContextHeader.getWorkflowSchedulingContext() != null &&
                    currentContextHeader.getWorkflowSchedulingContext().getApplicationSchedulingContextArray() != null &&
                    currentContextHeader.getWorkflowSchedulingContext().getApplicationSchedulingContextArray().length > 0) {
                try {
                    int cpuCount = currentContextHeader.getWorkflowSchedulingContext().getApplicationSchedulingContextArray()[0].getCpuCount();
                    if (cpuCount > 0) {
                        app.setCpuCount(cpuCount);
                    }
                } catch (NullPointerException e) {
                    log.debug("No value sent in WorkflowContextHeader for CPU Count, value in the Deployment Descriptor will be used");
                }
                try {
                    int nodeCount = currentContextHeader.getWorkflowSchedulingContext().getApplicationSchedulingContextArray()[0].getNodeCount();
                    if (nodeCount > 0) {
                        app.setNodeCount(nodeCount);
                    }
                } catch (NullPointerException e) {
                    log.debug("No value sent in WorkflowContextHeader for Node Count, value in the Deployment Descriptor will be used");
                }
                try {
                    String queueName = currentContextHeader.getWorkflowSchedulingContext().getApplicationSchedulingContextArray()[0].getQueueName();
                    if (queueName != null) {
                        if (app.getQueue() == null) {
                            QueueType queueType = app.addNewQueue();
                            queueType.setQueueName(queueName);
                        } else {
                            app.getQueue().setQueueName(queueName);
                        }
                    }
                } catch (NullPointerException e) {
                    log.debug("No value sent in WorkflowContextHeader for Queue Name, value in the Deployment Descriptor will be used");
                }
            }
            if (currentContextHeader.getWorkflowOutputDataHandling() != null &&
                    currentContextHeader.getWorkflowOutputDataHandling().getApplicationOutputDataHandlingArray().length != 0) {
                app.setOutputDataDirectory(currentContextHeader.getWorkflowOutputDataHandling().getApplicationOutputDataHandlingArray()[0].getOutputDataDirectory());
            }
        }
        if (app.getNodeCount() > 0) {
            jobAttr.set("hostCount", String.valueOf(app.getNodeCount()));
            log.debug("Setting number of Nodes to " + app.getNodeCount());
        }
        if (app.getCpuCount() > 0) {
            log.debug("Setting number of procs to " + app.getCpuCount());
            jobAttr.setNumProcs(app.getCpuCount());
        }
        if (app.getMinMemory() > 0) {
            log.debug("Setting minimum memory to " + app.getMinMemory());
            jobAttr.setMinMemory(app.getMinMemory());
        }
        if (app.getMaxMemory() > 0) {
            log.debug("Setting maximum memory to " + app.getMaxMemory());
            jobAttr.setMaxMemory(app.getMaxMemory());
        }
        if (app.getProjectAccount() != null) {
            if (app.getProjectAccount().getProjectAccountNumber() != null) {
                log.debug("Setting project to " + app.getProjectAccount().getProjectAccountNumber());
                jobAttr.setProject(app.getProjectAccount().getProjectAccountNumber());
            }
        }
        if (app.getQueue() != null) {
            if (app.getQueue().getQueueName() != null) {
                log.debug("Setting job queue to " + app.getQueue().getQueueName());
                jobAttr.setQueue(app.getQueue().getQueueName());
            }
        }
        String jobType = JobType.SINGLE.toString();
        if (app.getJobType() != null) {
            jobType = app.getJobType().toString();
        }
        if (jobType.equalsIgnoreCase(JobType.SINGLE.toString())) {
            log.debug("Setting job type to single");
            jobAttr.setJobType(GramAttributes.JOBTYPE_SINGLE);
        } else if (jobType.equalsIgnoreCase(JobType.SERIAL.toString())) {
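The scheduling-context block above detects missing header fields by catching NullPointerException. An equivalent, more explicit extraction is sketched below for the CPU count, using only types and calls that already appear in the snippet; the helper name itself is hypothetical:

    // Hypothetical helper: null-check the workflow scheduling context instead of catching NPEs.
    private static Integer getHeaderCpuCount(ContextHeaderDocument.ContextHeader header) {
        if (header == null
                || header.getWorkflowSchedulingContext() == null
                || header.getWorkflowSchedulingContext().getApplicationSchedulingContextArray() == null
                || header.getWorkflowSchedulingContext().getApplicationSchedulingContextArray().length == 0) {
            return null;
        }
        int cpuCount = header.getWorkflowSchedulingContext().getApplicationSchedulingContextArray()[0].getCpuCount();
        return cpuCount > 0 ? Integer.valueOf(cpuCount) : null;
    }

The caller could then replace the first try/catch with: Integer cpuCount = getHeaderCpuCount(currentContextHeader); if (cpuCount != null) { app.setCpuCount(cpuCount); }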

Examples of org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType

        ((GlobusHostType)host.getType()).setGridFTPEndPointArray(new String[]{gridftpAddress});
        /*
           * App
           */
        ApplicationDescription appDesc = new ApplicationDescription(HpcApplicationDeploymentType.type);
        HpcApplicationDeploymentType app = (HpcApplicationDeploymentType)appDesc.getType();
        ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
        name.setStringValue("EchoLocal");
        app.setApplicationName(name);
        ProjectAccountType projectAccountType = app.addNewProjectAccount();
        projectAccountType.setProjectAccountNumber("TG-AST110064");

        QueueType queueType = app.addNewQueue();
        queueType.setQueueName("development");

        app.setCpuCount(1);
        app.setJobType(JobTypeType.SERIAL);
        app.setNodeCount(1);
        app.setProcessorsPerNode(1);

        /*
           * Use bat file if it is compiled on Windows
           */
        app.setExecutableLocation("/bin/echo");

        /*
           * Default tmp location
           */
        String tempDir = "/scratch/01437/ogce/test/";
        String date = (new Date()).toString();
        date = date.replaceAll(" ", "_");
        date = date.replaceAll(":", "_");

        tempDir = tempDir + File.separator
                + "SimpleEcho" + "_" + date + "_" + UUID.randomUUID();

        System.out.println(tempDir);
        app.setScratchWorkingDirectory(tempDir);
        app.setStaticWorkingDirectory(tempDir);
        app.setInputDataDirectory(tempDir + File.separator + "inputData");
        app.setOutputDataDirectory(tempDir + File.separator + "outputData");
        app.setStandardOutput(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stdout");
        app.setStandardError(tempDir + File.separator + app.getApplicationName().getStringValue() + ".stderr");


        /*
           * Service
           */
 
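The timestamped temporary-directory naming used above (and repeated in a later example) can be factored into a small helper; a sketch using only java.io.File, java.util.Date and java.util.UUID, with the base directory passed in:

    // Hypothetical helper: build a unique working directory such as
    // <base>/SimpleEcho_<date>_<uuid>, with spaces and colons in the date replaced.
    private static String buildScratchDir(String baseDir, String appName) {
        String date = new Date().toString().replaceAll(" ", "_").replaceAll(":", "_");
        return baseDir + File.separator + appName + "_" + date + "_" + UUID.randomUUID();
    }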

Examples of org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType

    JobDefinitionDocument jobDefDoc = JobDefinitionDocument.Factory
        .newInstance();
    JobDefinitionType value = jobDefDoc.addNewJobDefinition();

    HpcApplicationDeploymentType appDepType = (HpcApplicationDeploymentType) context
        .getApplicationContext().getApplicationDeploymentDescription()
        .getType();

    // build Identification
    createJobIdentification(value, appDepType);
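JobDefinitionDocument is an XMLBeans-generated document type, so the assembled JSDL can be inspected by serializing it; a short usage sketch (XmlOptions comes from org.apache.xmlbeans and is not part of the snippet above):

    // Serialize the generated JSDL for logging or debugging (standard XMLBeans API).
    XmlOptions options = new XmlOptions().setSavePrettyPrint();
    System.out.println(jobDefDoc.xmlText(options));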

Examples of org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType

  }

  protected ApplicationDescription getApplicationDesc(JobTypeType jobType) {
    ApplicationDescription appDesc = new ApplicationDescription(
        HpcApplicationDeploymentType.type);
    HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) appDesc
        .getType();
    ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory
        .newInstance();
    name.setStringValue("EchoLocal");
    app.setApplicationName(name);
    ProjectAccountType projectAccountType = app.addNewProjectAccount();
    projectAccountType.setProjectAccountNumber("TG-AST110064");

    QueueType queueType = app.addNewQueue();
    queueType.setQueueName("development");

    app.setCpuCount(1);
    // TODO: also handle parallel jobs
    if((jobType.enumValue() == JobTypeType.SERIAL) || (jobType.enumValue() == JobTypeType.SINGLE)) {
      app.setJobType(JobTypeType.SERIAL);
    }
    else if (jobType.enumValue() == JobTypeType.MPI) {
      app.setJobType(JobTypeType.MPI);
    }
    else {
      app.setJobType(JobTypeType.OPEN_MP);
    }
   
    app.setNodeCount(1);
    app.setProcessorsPerNode(1);

    /*
     * Use bat file if it is compiled on Windows
     */
    app.setExecutableLocation("/bin/cat");

    /*
     * Default tmp location
     */
    String date = (new Date()).toString();
    date = date.replaceAll(" ", "_");
    date = date.replaceAll(":", "_");

    String remoteTempDir = scratchDir + File.separator + "SimpleEcho" + "_" + date + "_"
        + UUID.randomUUID();

    System.out.println(remoteTempDir);
   
    // no need of these parameters, as unicore manages by itself
    app.setScratchWorkingDirectory(remoteTempDir);
    app.setStaticWorkingDirectory(remoteTempDir);
    app.setInputDataDirectory(remoteTempDir + File.separator + "inputData");
    app.setOutputDataDirectory(remoteTempDir + File.separator + "outputData");
   
    app.setStandardOutput(app.getOutputDataDirectory()+"/jsdl_stdout");
   
    app.setStandardError(app.getOutputDataDirectory()+"/jsdl_stderr");

    return appDesc;
  }

Examples of org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType

      return String.valueOf(num);
    }
  }
 
    private void loadApplicationDescriptionAdvancedOptions() {
        HpcApplicationDeploymentType hpcAppType = getHPCApplicationDescriptionType();
        if (hpcAppType.getJobType() != null) {
            cmbJobType.setSelectedItem(hpcAppType.getJobType().toString());
        }
        txtMaxWallTime.setText(getPropValue(hpcAppType.getMaxWallTime()));
        txtCpuCount.setText(getPropValue(hpcAppType.getCpuCount()));
        txtNodeCount.setText(getPropValue(hpcAppType.getNodeCount()));
        txtProcessorsPerNode.setText(getPropValue(hpcAppType.getProcessorsPerNode()));
        txtMinMemory.setText(getPropValue(hpcAppType.getMinMemory()));
        txtMaxMemory.setText(getPropValue(hpcAppType.getMaxMemory()));
        ProjectAccountType projectAccount = getProjectAccountType();

        txtProjectAccountNumber.setText(projectAccount.getProjectAccountNumber() == null ? "" : projectAccount.getProjectAccountNumber());
        txtProjectAccountDescription.setText(projectAccount.getProjectAccountDescription() == null ? "" : projectAccount.getProjectAccountDescription());
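The fragment at the top of this snippet (return String.valueOf(num);) looks like the tail of the getPropValue helper used throughout the method. A hypothetical reconstruction, assuming unset (zero or negative) values should show as an empty field:

    // Hypothetical reconstruction of the helper whose tail appears above:
    // render unset (<= 0) numeric values as an empty text field.
    private String getPropValue(int num) {
        if (num <= 0) {
            return "";
        }
        return String.valueOf(num);
    }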

Examples of org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType

        /*
        * App
        */
        ApplicationDescription appDesc = new ApplicationDescription(HpcApplicationDeploymentType.type);
        HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) appDesc.getType();
        app.setCpuCount(1);
        app.setNodeCount(1);
        ApplicationDeploymentDescriptionType.ApplicationName name = appDesc.getType().addNewApplicationName();
        name.setStringValue("EchoLocal");
        app.setExecutableLocation("/bin/echo");
        app.setScratchWorkingDirectory(properties.getProperty("scratch.working.directory"));
        ProjectAccountType projectAccountType = app.addNewProjectAccount();
        projectAccountType.setProjectAccountNumber(properties.getProperty("allocation.charge.number"));
        QueueType queueType = app.addNewQueue();
        queueType.setQueueName(properties.getProperty("defualt.queue"));
        app.setMaxMemory(100);
       
        /*
           * Service
           */
        ServiceDescription serv = new ServiceDescription();
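This example pulls its settings (scratch.working.directory, allocation.charge.number, and defualt.queue, spelled exactly as the code reads it) from a java.util.Properties object. A minimal loading sketch, inside a test method that declares throws Exception; the properties file name is an assumption:

        // Hypothetical setup: load the test configuration used by the example above.
        // The file name is an assumption; the keys must match the getProperty calls exactly.
        Properties properties = new Properties();
        try (InputStream in = new FileInputStream("airavata-test.properties")) {
            properties.load(in);
        }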

Examples of org.apache.airavata.schemas.gfac.HpcApplicationDeploymentType

        ((GlobusHostType) host.getType()).setGridFTPEndPointArray(new String[]{properties.getProperty("gridftp.endpoint")});
        ((GlobusHostType) host.getType()).setGlobusGateKeeperEndPointArray(new String[]{properties.getProperty("gram.endpoints")});

        /* Application */
        ApplicationDescription appDesc = new ApplicationDescription(HpcApplicationDeploymentType.type);
        HpcApplicationDeploymentType app = (HpcApplicationDeploymentType) appDesc.getType();
        app.setNodeCount(1);
        ApplicationDeploymentDescriptionType.ApplicationName name = appDesc.getType().addNewApplicationName();
        name.setStringValue("EchoMPILocal");
        app.setExecutableLocation("/share/home/01437/ogce/airavata-test/mpi-hellow-world");
        app.setScratchWorkingDirectory(properties.getProperty("scratch.working.directory"));
        app.setCpuCount(16);
        app.setJobType(JobTypeType.MPI);
        //app.setMinMemory();
        ProjectAccountType projectAccountType = ((HpcApplicationDeploymentType) appDesc.getType()).addNewProjectAccount();
        projectAccountType.setProjectAccountNumber(properties.getProperty("allocation.charge.number"));

        /* Service */
 
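The two casts at the top of this snippet assume a HostDescription backed by GlobusHostType was created earlier in the test. A sketch of that setup, where the host name/address values and the setHostName/setHostAddress accessors are assumptions beyond what the snippet shows:

        // Hypothetical sketch of the host object this snippet casts to GlobusHostType.
        HostDescription host = new HostDescription(GlobusHostType.type);
        host.getType().setHostName("gram-test-host");                // placeholder value
        host.getType().setHostAddress("gram-test-host.example.org"); // placeholder value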