Examples of TrainingSet


Examples of org.neuroph.core.learning.TrainingSet

     * Runs this sample
     */
    public static void main(String[] args) {
     
        // create training set (logical XOR function)
        TrainingSet trainingSet = new TrainingSet(2, 1);
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{0, 0}, new double[]{0}));
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{0, 1}, new double[]{1}));
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{1, 0}, new double[]{1}));
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{1, 1}, new double[]{0}));

        // create multi layer perceptron
        MultiLayerPerceptron myMlPerceptron = new MultiLayerPerceptron(TransferFunctionType.TANH, 2, 3, 1);

        // enable batch if using MomentumBackpropagation
View Full Code Here

Examples of org.neuroph.core.learning.TrainingSet

     * @param inputsCount training element (neural net) inputs count
     * @param outputsCount training element (neural net) outputs count
     * @return training set with stock data
     */
    public static TrainingSet importFromArray(double[] values, int inputsCount, int outputsCount) {
        TrainingSet trainingSet = new TrainingSet(inputsCount, outputsCount);
        for (int i = 0; i < values.length - inputsCount; i++) {
            ArrayList<Double> inputs = new ArrayList<Double>();
            for (int j = i; j < i + inputsCount; j++) {
                inputs.add(values[j]);
            }
            ArrayList<Double> outputs = new ArrayList<Double>();
            if (outputsCount > 0 && i + inputsCount + outputsCount <= values.length) {
                for (int j = i + inputsCount; j < i + inputsCount + outputsCount; j++) {
                    outputs.add(values[j]);
                }
                if (outputsCount > 0) {
                    trainingSet.addElement(new SupervisedTrainingElement(inputs, outputs));
                } else {
                    trainingSet.addElement(new TrainingElement(inputs));
                }
            }
        }
        return trainingSet;
    }
View Full Code Here

Examples of org.neuroph.core.learning.TrainingSet

        int maxIterations = 10000;
        NeuralNetwork neuralNet = new MultiLayerPerceptron(4, 9, 1);
        ((LMS) neuralNet.getLearningRule()).setMaxError(0.001);//0-1
        ((LMS) neuralNet.getLearningRule()).setLearningRate(0.7);//0-1
        ((LMS) neuralNet.getLearningRule()).setMaxIterations(maxIterations);//0-1
        TrainingSet trainingSet = new TrainingSet();

        double daxmax = 10000.0D;
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{3710.0D / daxmax, 3690.0D / daxmax, 3890.0D / daxmax, 3695.0D / daxmax}, new double[]{3666.0D / daxmax}));
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{3690.0D / daxmax, 3890.0D / daxmax, 3695.0D / daxmax, 3666.0D / daxmax}, new double[]{3692.0D / daxmax}));
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{3890.0D / daxmax, 3695.0D / daxmax, 3666.0D / daxmax, 3692.0D / daxmax}, new double[]{3886.0D / daxmax}));
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{3695.0D / daxmax, 3666.0D / daxmax, 3692.0D / daxmax, 3886.0D / daxmax}, new double[]{3914.0D / daxmax}));
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{3666.0D / daxmax, 3692.0D / daxmax, 3886.0D / daxmax, 3914.0D / daxmax}, new double[]{3956.0D / daxmax}));
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{3692.0D / daxmax, 3886.0D / daxmax, 3914.0D / daxmax, 3956.0D / daxmax}, new double[]{3953.0D / daxmax}));
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{3886.0D / daxmax, 3914.0D / daxmax, 3956.0D / daxmax, 3953.0D / daxmax}, new double[]{4044.0D / daxmax}));
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{3914.0D / daxmax, 3956.0D / daxmax, 3953.0D / daxmax, 4044.0D / daxmax}, new double[]{3987.0D / daxmax}));
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{3956.0D / daxmax, 3953.0D / daxmax, 4044.0D / daxmax, 3987.0D / daxmax}, new double[]{3996.0D / daxmax}));
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{3953.0D / daxmax, 4044.0D / daxmax, 3987.0D / daxmax, 3996.0D / daxmax}, new double[]{4043.0D / daxmax}));
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{4044.0D / daxmax, 3987.0D / daxmax, 3996.0D / daxmax, 4043.0D / daxmax}, new double[]{4068.0D / daxmax}));
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{3987.0D / daxmax, 3996.0D / daxmax, 4043.0D / daxmax, 4068.0D / daxmax}, new double[]{4176.0D / daxmax}));
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{3996.0D / daxmax, 4043.0D / daxmax, 4068.0D / daxmax, 4176.0D / daxmax}, new double[]{4187.0D / daxmax}));
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{4043.0D / daxmax, 4068.0D / daxmax, 4176.0D / daxmax, 4187.0D / daxmax}, new double[]{4223.0D / daxmax}));
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{4068.0D / daxmax, 4176.0D / daxmax, 4187.0D / daxmax, 4223.0D / daxmax}, new double[]{4259.0D / daxmax}));
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{4176.0D / daxmax, 4187.0D / daxmax, 4223.0D / daxmax, 4259.0D / daxmax}, new double[]{4203.0D / daxmax}));
        trainingSet.addElement(new SupervisedTrainingElement(new double[]{4187.0D / daxmax, 4223.0D / daxmax, 4259.0D / daxmax, 4203.0D / daxmax}, new double[]{3989.0D / daxmax}));
        neuralNet.learnInSameThread(trainingSet);
        System.out.println("Time stamp N2:" + new SimpleDateFormat("dd-MMM-yyyy HH:mm:ss:MM").format(new Date()));

        TrainingSet testSet = new TrainingSet();
        testSet.addElement(new TrainingElement(new double[]{4223.0D / daxmax, 4259.0D / daxmax, 4203.0D / daxmax, 3989.0D / daxmax}));

        for (TrainingElement testElement : testSet.trainingElements()) {
            neuralNet.setInput(testElement.getInput());
            neuralNet.calculate();
            double[] networkOutput = neuralNet.getOutput();
            System.out.print("Input: " + Arrays.toString(testElement.getInput()) );
            System.out.println(" Output: " + Arrays.toString(networkOutput) );
View Full Code Here

Examples of org.neuroph.core.learning.TrainingSet

  {

    FileReader fileReader = null;

    try {
     TrainingSet trainingSet = new TrainingSet();
     fileReader = new FileReader(new File(filePath));
     BufferedReader reader = new BufferedReader(fileReader);

     String line = "";
     
      while((line = reader.readLine())!=null) {
        double[] inputs = new double[inputsCount];
        double[] outputs = new double[outputsCount];
        String[] values = line.split(separator);

        if (values[0].equals("")) continue; // skip if line was empty

        for (int i = 0; i < inputsCount; i++)
          inputs[i] =  Double.parseDouble(values[i]);

           for (int i = 0; i < outputsCount; i++)
          outputs[i] = Double.parseDouble(values[inputsCount + i]);

        if (outputsCount>0) {
              trainingSet.addElement(new SupervisedTrainingElement(inputs, outputs));
        } else {
              trainingSet.addElement(new TrainingElement(inputs));
        }
      }

      return trainingSet;
     
View Full Code Here

Examples of org.neuroph.core.learning.TrainingSet

         * @param imageLabels image labels
         * @param rgbDataMap map collection of rgb data
         * @return training set for the specified image data
         */
  public static TrainingSet createTrainingSet(List<String> imageLabels, Map<String, FractionRgbData> rgbDataMap)   { 
    TrainingSet trainingSet = new TrainingSet();

    for (Entry<String, FractionRgbData> entry : rgbDataMap.entrySet()) {
      double[] input = entry.getValue().getFlattenedRgbValues();
      double[] response = createResponse(entry.getKey(), imageLabels);
      trainingSet.addElement(new SupervisedTrainingElement(
          VectorParser.convertToVector(input),
          VectorParser.convertToVector(response)));
    }

                return trainingSet;
View Full Code Here

Examples of org.neuroph.core.learning.TrainingSet

         * @param rgbDataMap map collection of rgb data
         * @return binary black and white training set for the specified image data
         */
        public static TrainingSet createBlackAndWhiteTrainingSet(List<String> imageLabels, Map<String, FractionRgbData> rgbDataMap) throws VectorSizeMismatchException
  {
    TrainingSet trainingSet = new TrainingSet();

    for (Entry<String, FractionRgbData> entry : rgbDataMap.entrySet()) {
      double[] inputRGB = entry.getValue().getFlattenedRgbValues();
                        double[] inputBW = FractionRgbData.convertRgbInputToBinaryBlackAndWhite(inputRGB);
                        double[] response = createResponse(entry.getKey(), imageLabels);
      trainingSet.addElement(new SupervisedTrainingElement(
          VectorParser.convertToVector(inputBW),
          VectorParser.convertToVector(response)));
    }

            return trainingSet;
View Full Code Here

Examples of org.neuroph.core.learning.TrainingSet

        }

        public void trainNet(){
            double traininput [] = new double [9];
            double trainoutput [] = new double[1];
            TrainingSet trainingSet = new TrainingSet();
            input = new File("C:/Users/Ishuah  K/NeuralTicTacToe/src/neuraltictactoe/trainingset/tictactoedata.tttd");

            try {
            r = new Scanner(input);

            System.out.println("File ok.(1)complete");
        } catch (FileNotFoundException ex) {
            JOptionPane.showMessageDialog(null, "Error reading file:"+ex);
        }
      
      
        while(r.hasNext()){
      
            System.out.println("one");
        int ind = 1;
        int index = 0;
        for( ind = 1; ind<10; ind++){
        traininput [index] = r.nextInt();
        System.out.print(traininput[index]+" ");
        index++;
        }
        trainoutput [0] = r.nextDouble();
        System.out.println(trainoutput[0]);
        trainingSet.addElement(new SupervisedTrainingElement(traininput, trainoutput));
       
        }
        if(point >= prevpoint){
            myNeuralNetwork.learnInSameThread(trainingSet);
        }else{
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.