Examples of PipelineWorkflow


Examples of uk.org.ogsadai.client.toolkit.PipelineWorkflow

    {    
        // First workflow: create a data cache resource on the server and
        // deliver its ID with the request status.
        CreateDataCache createCache = new CreateDataCache();
        DeliverToRequestStatus deliver = new DeliverToRequestStatus();
        deliver.connectInput(createCache.getResultOutput());
       
        PipelineWorkflow workflow = new PipelineWorkflow();
        workflow.add(createCache);
        workflow.add(deliver);
        executeWorkflow(drer, workflow);
        ResourceID cacheID = createCache.nextResult();
       
        System.out.println("Created data cache with ID: " + cacheID);
       
        // Second workflow: run an SQL query and write the resulting tuples
        // into the data cache created above.
        SQLQuery query = new SQLQuery();
        query.addExpression("SELECT name, address FROM littleblackbook WHERE id<10");
        query.setResourceID("MySQLResource");
        WriteToDataCache write = new WriteToDataCache();
        write.setResourceID(cacheID);
        write.connectDataInput(query.getDataOutput());
       
        workflow = new PipelineWorkflow();
        workflow.add(query);
        workflow.add(write);
        executeWorkflow(drer, workflow);
       
        System.out.println("Wrote data to cache.");
       
        // Third workflow: read the cached tuples back, convert them to
        // WebRowSet XML and deliver them with the request status.
        ReadFromDataCache read = new ReadFromDataCache();
        read.setResourceID(cacheID);
        TupleToWebRowSetCharArrays tupleToWRS = new TupleToWebRowSetCharArrays();
        tupleToWRS.connectDataInput(read.getResultOutput());
        deliver = new DeliverToRequestStatus();
        deliver.connectInput(tupleToWRS.getResultOutput());
       
        workflow = new PipelineWorkflow();
        workflow.add(read);
        workflow.add(tupleToWRS);
        workflow.add(deliver);
        executeWorkflow(drer, workflow);
       
        System.out.println("Read from cache");
       
        while (tupleToWRS.hasNextResult())
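
The excerpt above calls an executeWorkflow(drer, workflow) helper that lies outside the excerpt. A minimal sketch, assuming the helper simply submits the pipeline synchronously through the DataRequestExecutionResource, as the data source example further down this page does with drer.execute(..., RequestExecutionType.SYNCHRONOUS):

    // Hypothetical helper assumed by the excerpt above: submit the pipeline
    // synchronously; the caller then pulls results from the delivery activities.
    private static void executeWorkflow(DataRequestExecutionResource drer,
                                        PipelineWorkflow workflow)
        throws Exception
    {
        drer.execute(workflow, RequestExecutionType.SYNCHRONOUS);
    }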

Examples of uk.org.ogsadai.client.toolkit.PipelineWorkflow

        }
        if (count < 3)
        {
            throw new Exception("Count must be greater than 2");
        }
        PipelineWorkflow workflow = new PipelineWorkflow();

        SQLQuery query = new SQLQuery();
        query.setResourceID(resource);
        query.addExpression("SELECT outlook, windy, temperature, humidity, weather.play FROM weather JOIN measurements ON weather.id = measurements.id");
        workflow.add(query);
       
        ListRandomSplit split = new ListRandomSplit();
        split.setNumberOfOutputs(count);
        split.connectDataInput(query.getDataOutput());
        workflow.add(split);
       
        TupleUnionAll[] union = new TupleUnionAll[count];
        BuildClassifier[] classifier = new BuildClassifier[count];
        TupleToWebRowSetCharArrays[] tupleToWRS = new TupleToWebRowSetCharArrays[count];
        for (int i=0; i<count; i++)
        {
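            // Train classifier[i] on the union of every partition except
            // partition i produced by ListRandomSplit.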
            union[i] = new TupleUnionAll();
            union[i].setNumberOfInputs(count-1);
            for (int j=0; j<i; j++)
            {
                union[i].connectDataInput(j, split.getOutput(j));
            }
            for (int j=i+1; j<count; j++)
            {
                union[i].connectDataInput(j-1, split.getOutput(j));
            }
            workflow.add(union[i]);
           
            classifier[i] = new BuildClassifier();
            classifier[i].newNominalValues();
            classifier[i].addNominalValues(0, "sunny", "overcast", "rain");
            classifier[i].addNominalValues(1, "true", "false");
            classifier[i].addNominalValues(4, "yes", "no");
            classifier[i].finishNominalValues();
            classifier[i].addClassIndex(4);
            classifier[i].connectDataInput(union[i].getDataOutput());
           
            DeliverToRequestStatus deliverDT = new DeliverToRequestStatus();
            deliverDT.connectInput(classifier[i].getResultOutput());

            TupleProjectByIDS project = new TupleProjectByIDS();
            project.addColumnIDs(new int[] {4});
            project.connectDataInput(union[i].getDataOutput());
           
            Classify classify = new Classify();
            classify.connectDataInput(union[i].getDataOutput());
            classify.connectClassifierInput(classifier[i].getResultOutput());
           
            Evaluate evaluate = new Evaluate();
            evaluate.connectExpectedDataInput(project.getDataOutput());
            evaluate.connectPredictedDataInput(classify.getResultOutput());
           
            tupleToWRS[i] = new TupleToWebRowSetCharArrays();
            tupleToWRS[i].connectDataInput(evaluate.getResultOutput());
           
            DeliverToRequestStatus deliverEval = new DeliverToRequestStatus();
            deliverEval.connectInput(tupleToWRS[i].getResultOutput());
           
            workflow.add(classifier[i]);
            workflow.add(classify);
            workflow.add(project);
            workflow.add(evaluate);
            workflow.add(tupleToWRS[i]);
            workflow.add(deliverDT);
            workflow.add(deliverEval);
        }
               
        new Execute(service).execute(workflow);
           
        for (int i=0; i<count; i++)

Examples of uk.org.ogsadai.client.toolkit.PipelineWorkflow

        evaluate.connectDataInput(query.getDataOutput());
        DeliverToRequestStatus deliverDT = new DeliverToRequestStatus();
        deliverDT.connectInput(classifier.getResultOutput());
        DeliverToRequestStatus deliverEval = new DeliverToRequestStatus();
        deliverEval.connectInput(evaluate.getResultOutput());
        PipelineWorkflow workflow = new PipelineWorkflow();
        workflow.add(query);
        workflow.add(classifier);
        workflow.add(evaluate);
        workflow.add(deliverDT);
        workflow.add(deliverEval);
               
        new Execute(service).execute(workflow);
           
        System.out.println("   ---- Decision tree:  ----  ");
        System.out.println(classifier.nextResult());

Examples of uk.org.ogsadai.client.toolkit.PipelineWorkflow

        union.connectDataInput(1, replace.getDataOutput());
        TupleToWebRowSetCharArrays webrowset = new TupleToWebRowSetCharArrays();
        webrowset.connectDataInput(union.getDataOutput());
        DeliverToRequestStatus deliver = new DeliverToRequestStatus();
        deliver.connectInput(webrowset.getResultOutput());
        PipelineWorkflow workflow = new PipelineWorkflow();
        workflow.add(query);
        workflow.add(mean);
        workflow.add(split);
        workflow.add(replace);
        workflow.add(union);
        workflow.add(webrowset);
        workflow.add(deliver);
       
        new Execute(service).execute(workflow);
       
        Execute.print(webrowset.nextResultAsResultSet());
    }

Examples of uk.org.ogsadai.client.toolkit.PipelineWorkflow

        evaluate.connectPMMLTreeInput(classifier.getResultOutput());
        evaluate.connectDataInput(join.getResultOutput());
        DeliverToRequestStatus deliver = new DeliverToRequestStatus();
        deliver.connectInput(evaluate.getResultOutput());
       
        PipelineWorkflow workflow = new PipelineWorkflow();
        workflow.add(query1);
        workflow.add(query2);
        workflow.add(join);
        workflow.add(classifier);
        workflow.add(deliverDT);
        workflow.add(evaluate);
        workflow.add(deliver);
       
        new Execute(service).execute(workflow);
        System.out.println("   ---- Decision tree:  ----  ");
        System.out.println(classifier.nextResult());
        System.out.println("\n\n  ---- Evaluation result:  ----  ");

Examples of uk.org.ogsadai.client.toolkit.PipelineWorkflow

        }
        if (count < 3)
        {
            throw new Exception("Count must be greater than 2");
        }
        PipelineWorkflow workflow = new PipelineWorkflow();

        SQLQuery query = new SQLQuery();
        query.setResourceID(resource);
        query.addExpression("SELECT temperature, humidity, outlook, windy, weather.play FROM weather JOIN measurements ON weather.id = measurements.id");
        workflow.add(query);
       
        ListRandomSplit split = new ListRandomSplit();
        split.setNumberOfOutputs(count);
        split.connectDataInput(query.getDataOutput());
        workflow.add(split);
       
        TupleUnionAll[] union = new TupleUnionAll[count];
        Evaluate[] evaluate = new Evaluate[count];
        SprintClassifier[] classifier = new SprintClassifier[count];
        for (int i=0; i<count; i++)
        {
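            // As in the previous ensemble example: train classifier[i] on every
            // partition except i, then evaluate it against the held-out partition i.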
            union[i] = new TupleUnionAll();
            union[i].setNumberOfInputs(count-1);
            for (int j=0; j<i; j++)
            {
                union[i].connectDataInput(j, split.getOutput(j));
            }
            for (int j=i+1; j<count; j++)
            {
                union[i].connectDataInput(j-1, split.getOutput(j));
            }
            workflow.add(union[i]);
           
            classifier[i] = new SprintClassifier();
            classifier[i].addClassName("play");
            classifier[i].addClassValues(new String[] {"yes", "no"});
            classifier[i].connectDataInput(union[i].getDataOutput());
           
            DeliverToRequestStatus deliverDT = new DeliverToRequestStatus();
            deliverDT.connectInput(classifier[i].getResultOutput());

            evaluate[i] = new Evaluate();
            evaluate[i].connectPMMLTreeInput(classifier[i].getResultOutput());
            evaluate[i].connectDataInput(split.getOutput(i));
           
            DeliverToRequestStatus deliverEval = new DeliverToRequestStatus();
            deliverEval.connectInput(evaluate[i].getResultOutput());
           
            workflow.add(classifier[i]);
            workflow.add(evaluate[i]);
            workflow.add(deliverDT);
            workflow.add(deliverEval);
        }
               
        new Execute(service).execute(workflow);
           
        for (int i=0; i<count; i++)

Examples of uk.org.ogsadai.client.toolkit.PipelineWorkflow

        // Connect the WebRowSet output (the converted RDF query results) to DeliverToRequestStatus
        deliverToRequestStatus.connectInput(tupleToWebRowSet.getResultOutput());

        // Create the workflow
        PipelineWorkflow pipeline = new PipelineWorkflow();
        pipeline.add(rdfActivity);
        pipeline.add(tupleToWebRowSet);
        pipeline.add(deliverToRequestStatus);

        // Execute the workflow
        RequestResource req = null;
        try
        {

Examples of uk.org.ogsadai.client.toolkit.PipelineWorkflow

        CreateDataSource createDataSource = new CreateDataSource();
        DeliverToRequestStatus deliverToRequestStatus = new DeliverToRequestStatus();
        deliverToRequestStatus.connectInput(createDataSource.getResultOutput());

        PipelineWorkflow createSourceWorkflow = new PipelineWorkflow();
        createSourceWorkflow.add(createDataSource);
        createSourceWorkflow.add(deliverToRequestStatus);

        drer.execute(createSourceWorkflow, RequestExecutionType.SYNCHRONOUS);

        ResourceID dataSourceID = createDataSource.nextResult();

        DataSourceResource dataSource = serverProxy
                .getDataSourceResource(dataSourceID);

        // String SPARQLquery = "PREFIX p: <http://dbpedia.org/property/> " +
        // "SELECT ?artist ?artwork ?museum ?director " +
        // "WHERE {" +
        // "   ?artwork p:artist ?artist ." +
        // "  ?artwork p:museum ?museum ." +
        // "   ?museum p:director ?director }";

        String SPARQLquery = "PREFIX p: <http://dbpedia.org/property/>"
                + " SELECT ?film1 ?actor1 ?film2 ?actor2 "
                + "WHERE     {"
                + "   ?film1 p:starring <http://dbpedia.org/resource/Kevin_Bacon> ."
                + "   ?film1 p:starring ?actor1 .   ?film2 p:starring ?actor1 ."
                + "   ?film2 p:starring ?actor2 .   }";

        RDFActivity rdfActivity = new RDFActivity(SPARQLquery);
//        rdfActivity.setResourceID("jorge");

        TupleToWebRowSetCharArrays tupleToWebRowSet = new TupleToWebRowSetCharArrays();
        tupleToWebRowSet.connectDataInput(rdfActivity.getDataOutput());

        // Write the WebRowSet output into the data source created above.
        WriteToDataSource writeToDataSource = new WriteToDataSource();
        writeToDataSource.setResourceID(dataSourceID);
        writeToDataSource.connectInput(tupleToWebRowSet.getResultOutput());

        // Assemble the second workflow: query, convert to WebRowSet, write to the data source

        PipelineWorkflow pipeline = new PipelineWorkflow();
        pipeline.add(rdfActivity);
        pipeline.add(tupleToWebRowSet);
        pipeline.add(writeToDataSource);

        RequestResource requestResource = drer.execute(pipeline,
                RequestExecutionType.ASYNCHRONOUS);
        requestResource.pollUntilRequestStarted(1000);

Examples of uk.org.ogsadai.client.toolkit.PipelineWorkflow

        if (node instanceof ProcessingElementNode)
        {
            name = ((ProcessingElementNode)node).getChosenPEOrNodeName();
        }
       
        // Resolve the activity descriptor for the chosen name and add a matching
        // ProcessingElement to this node's workflow.
        PipelineWorkflow workflow = getWorkflow(node);
        ActivityDescriptor descriptor = mRegistry.getActivity(name);
        ProcessingElement activity =
            new ProcessingElement(descriptor.getActivityName());
        activity.setResourceInput(descriptor.getResourceInput());
        workflow.add(activity);
        mPEs.put(node, activity);
        createInputsAndOutputs(node, activity);
    }