Package: org.encog.ml.data

Usage examples of the org.encog.ml.data.MLData interface


  }

  public void addRow(final int row) {
    final int idealSize = this.data.getIdealSize();
    final int inputSize = this.data.getInputSize();
    final MLData idealData = new BasicMLData(idealSize);
    final MLData inputData = new BasicMLData(inputSize);
    final MLDataPair pair = new BasicMLDataPair(inputData,
        idealData);
    if (row == -1) {
      this.data.getData().add(pair);
    } else {
View Full Code Here


    final int inputSize = this.data.getInputSize();

    // does it fall inside of input or ideal?
    if (col < inputSize) {
      for (final MLDataPair pair : this.data) {
        final MLData input = pair.getInput();
        final double[] d = new double[input.size() - 1];
        int t = 0;
        for (int i = 0; i < input.size(); i++) {
          if (i != col) {
            d[t] = pair.getInput().getData(i);
            t++;
          }
        }
        input.setData(d);
      }
    } else {
      for (final MLDataPair pair : this.data) {
        final MLData ideal = pair.getIdeal();
        final double[] d = new double[ideal.size() - 1];
        int t = 0;
        for (int i = 0; i < ideal.size(); i++) {
          if (i != col - inputSize) {
            d[t] = pair.getInput().getData(i);
            t++;
          }

        }
        ideal.setData(d);
      }
    }

    final TableModelEvent tce = new TableModelEvent(this,
        TableModelEvent.HEADER_ROW);
View Full Code Here

    final int totalWindowSize = this.inputWindow + this.predictWindow;
    final int stopPoint = data.length - totalWindowSize;

    for (int i = 0; i < stopPoint; i++) {
      final MLData inputData
        = new BasicMLData(this.inputWindow);
      final MLData idealData
        = new BasicMLData(this.predictWindow);

      int index = i;

      // handle input window
      for (int j = 0; j < this.inputWindow; j++) {
        inputData.setData(j, data[index++]);
      }

      // handle predict window
      for (int j = 0; j < this.predictWindow; j++) {
        idealData.setData(j, data[index++]);
      }

      final MLDataPair pair = new BasicMLDataPair(inputData,
          idealData);
      result.add(pair);
View Full Code Here

    final ErrorCalculation errorCalculation = new ErrorCalculation();

    for (final MLDataPair pair : this.training) {
      // calculate the error
      final MLData output = this.network.compute(pair.getInput());

      for (int currentAdaline = 0; currentAdaline < output.size(); currentAdaline++) {
        final double diff = pair.getIdeal().getData(currentAdaline)
            - output.getData(currentAdaline);

        // weights
        for (int i = 0; i <= this.network.getInputCount(); i++) {
          final double input;

          if (i == this.network.getInputCount()) {
            input = 1.0;
          } else {
            input = pair.getInput().getData(i);
          }

          this.network.addWeight(0, i, currentAdaline,
              this.learningRate * diff * input);
        }
      }

      errorCalculation.updateError(output.getData(), pair.getIdeal()
          .getData(),pair.getSignificance());
    }

    // set the global error
    setError(errorCalculation.calculate());
View Full Code Here

   * @param output
   *            The output.
   */
  public final void compute(final double[] input, final double[] output) {
    final BasicMLData input2 = new BasicMLData(input);
    final MLData output2 = this.compute(input2);
    EngineArray.arrayCopy(output2.getData(), output);
  }
View Full Code Here

   * @return The output from the neural network.
   */
  @Override
  public final MLData compute(final MLData input) {
    try {
      final MLData result = new BasicMLData(this.structure.getFlat()
          .getOutputCount());
      this.structure.getFlat().compute(input.getData(), result.getData());
      return result;
    } catch (final ArrayIndexOutOfBoundsException ex) {
      throw new NeuralNetworkError(
          "Index exception: there was likely a mismatch between layer sizes, or the size of the input presented to the network.",
          ex);
View Full Code Here

   * @param input
   *            The input patter to present to the neural network.
   * @return The winning neuron.
   */
  public final int winner(final MLData input) {
    final MLData output = compute(input);
    return EngineArray.maxIndex(output.getData());
  }
View Full Code Here

   * @param input
   *            The input to this synapse.
   * @return The output from this synapse.
   */
  /**
   * Compute the output of this NEAT network for the given input.
   * <p>
   * NOTE(review): the code assumes the neuron list is ordered as: all
   * input neurons first, then exactly one bias neuron, then the
   * remaining hidden/output neurons — confirm this invariant is
   * guaranteed by the genome-to-phenotype construction.
   *
   * @param input
   *            The input to this synapse.
   * @return The output from this synapse.
   */
  public MLData compute(final MLData input) {
    final MLData result = new BasicMLData(this.outputCount);

    // an un-evolved genome has no phenotype to evaluate
    if (this.neurons.size() == 0) {
      throw new NeuralNetworkError(
"This network has not been evolved yet, it has no neurons in the NEAT synapse.");
    }

    // in snapshot mode the whole network is re-evaluated networkDepth
    // times — presumably so activations can propagate across every
    // layer (and recurrent links); confirm against the NEAT reference
    int flushCount = 1;

    if (this.snapshot) {
      flushCount = this.networkDepth;
    }

    // iterate through the network FlushCount times
    for (int i = 0; i < flushCount; ++i) {
      int outputIndex = 0;
      int index = 0;

      result.clear();

      // populate the input neurons (contiguous at the head of the list)
      while (this.neurons.get(index).getNeuronType()
          == NEATNeuronType.Input) {
        this.neurons.get(index).setOutput(input.getData(index));

        index++;
      }

      // set the bias neuron (always outputs 1, directly after inputs)
      this.neurons.get(index++).setOutput(1);

      // propagate through the remaining neurons in list order
      while (index < this.neurons.size()) {

        final NEATNeuron currentNeuron = this.neurons.get(index);

        // weighted sum of every inbound link
        double sum = 0;

        for (final NEATLink link : currentNeuron.getInboundLinks()) {
          final double weight = link.getWeight();
          final double neuronOutput = link.getFromNeuron()
              .getOutput();
          sum += weight * neuronOutput;
        }

        // apply the activation function; the activation response scales
        // this neuron's pre-activation value
        final double[] d = new double[1];
        d[0] = sum / currentNeuron.getActivationResponse();
        this.activationFunction.activationFunction(d,0,d.length);

        this.neurons.get(index).setOutput(d[0]);

        // collect output-neuron activations in encounter order
        if (currentNeuron.getNeuronType() == NEATNeuronType.Output) {
          result.setData(outputIndex++, currentNeuron.getOutput());
        }
        index++;
      }
    }

    // final squash of the collected outputs
    this.outputActivationFunction.activationFunction(result.getData(), 0, result.size());

    return result;
  }
View Full Code Here

  /**
   * {@inheritDoc}
   */
  @Override
  public final MLData compute(final MLData input) {
    final MLData temp = computeInstar(input);
    return computeOutstar(temp);
  }
View Full Code Here

   * Compute the instar layer.
   * @param input The input.
   * @return The output.
   */
  public final MLData computeInstar(final MLData input) {
    final MLData result = new BasicMLData(this.instarCount);
    int w, i, j;
    double sum, sumWinners, maxOut;
    int winner = 0;
    final boolean[] winners = new boolean[this.instarCount];

    for (i = 0; i < this.instarCount; i++) {
      sum = 0;
      for (j = 0; j < this.inputCount; j++) {
        sum += this.weightsInputToInstar.get(j, i) * input.getData(j);
      }
      result.setData(i, sum);
      winners[i] = false;
    }
    sumWinners = 0;
    for (w = 0; w < this.winnerCount; w++) {
      maxOut = Double.MIN_VALUE;
      for (i = 0; i < this.instarCount; i++) {
        if (!winners[i] && (result.getData(i) > maxOut)) {
          winner = i;
          maxOut = result.getData(winner);
        }
      }
      winners[winner] = true;
      sumWinners += result.getData(winner);
    }
    for (i = 0; i < this.instarCount; i++) {
      if (winners[i]
          && (Math.abs(sumWinners) > Encog.DEFAULT_DOUBLE_EQUAL)) {
        result.getData()[i] /= sumWinners;
      } else {
        result.getData()[i] = 0;
      }
    }

    return result;
  }
View Full Code Here

TOP

Related Classes of org.encog.ml.data.MLData

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware@gmail.com.