Package org.encog.neural.networks

Examples of org.encog.neural.networks.BasicNetwork
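
Most of the snippets below are fragments of larger classes. For orientation, here is a minimal, self-contained sketch of the construct / finalize / reset / train cycle they all follow, using the classic XOR data set (assuming Encog 3.x; the class name XorExample is illustrative):

import org.encog.engine.network.activation.ActivationSigmoid;
import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.basic.BasicMLDataSet;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;
import org.encog.neural.networks.training.propagation.resilient.ResilientPropagation;

public class XorExample {
    public static void main(String[] args) {
        double[][] input = { {0, 0}, {0, 1}, {1, 0}, {1, 1} };
        double[][] ideal = { {0}, {1}, {1}, {0} };

        // Build a 2-3-1 feedforward network.
        BasicNetwork network = new BasicNetwork();
        network.addLayer(new BasicLayer(null, true, 2));                      // input layer, with bias
        network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 3));  // hidden layer
        network.addLayer(new BasicLayer(new ActivationSigmoid(), false, 1)); // output layer
        network.getStructure().finalizeStructure();
        network.reset(); // randomize the weights

        // Train with resilient propagation until the error is small.
        MLDataSet trainingSet = new BasicMLDataSet(input, ideal);
        ResilientPropagation train = new ResilientPropagation(network, trainingSet);
        do {
            train.iteration();
        } while (train.getError() > 0.01);
        train.finishTraining();

        System.out.println("Trained to error " + train.getError());
    }
}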


   
    public NeuralNetworkWrapper(String filePath, int inputNeurons, int outputNeurons) {
        this.filePath = filePath;
        File file = new File(filePath);
        // new File(...) never returns null, so only the existence check is needed.
        if (!file.exists()) {
            // No saved network on disk yet: build a fresh three-layer feedforward network.
            neuralNetwork = new BasicNetwork();
            neuralNetwork.addLayer(new BasicLayer(null, true, inputNeurons)); // linear input layer, with bias
            neuralNetwork.addLayer(new BasicLayer(new ActivationSigmoid(), true, ProgramConfig.NEURAL_HIDDEN_LAYER_LENGTH)); // sigmoid hidden layer
            neuralNetwork.addLayer(new BasicLayer(new ActivationSigmoid(), false, outputNeurons)); // sigmoid output layer, no bias
            neuralNetwork.getStructure().finalizeStructure();
            neuralNetwork.reset(); // randomize the weights
        }
        // ... (remainder truncated in the original; presumably loads the saved network)
    }
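
The constructor above builds a fresh network only when no file exists at filePath; the truncated remainder presumably loads the saved network instead. A minimal sketch of that save/load counterpart using Encog 3.x's EncogDirectoryPersistence (the NetworkPersistence class is illustrative):

import java.io.File;

import org.encog.neural.networks.BasicNetwork;
import org.encog.persist.EncogDirectoryPersistence;

public class NetworkPersistence {
    // Load a previously saved network, or return null if none exists yet.
    public static BasicNetwork loadIfPresent(String filePath) {
        File file = new File(filePath);
        if (!file.exists()) {
            return null;
        }
        return (BasicNetwork) EncogDirectoryPersistence.loadObject(file);
    }

    // Persist a trained network to disk in Encog's EG format.
    public static void save(String filePath, BasicNetwork network) {
        EncogDirectoryPersistence.saveObject(new File(filePath), network);
    }
}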


    }
    // Optionally add a second hidden layer to the pattern.
    if (hidden2 > 0) {
      pattern.addHiddenLayer(hidden2);
    }

    // Generate the network from the configured pattern and randomize its weights.
    final BasicNetwork network = (BasicNetwork) pattern.generate();
    network.reset();
    return network;
  }
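
The fragment above configures a pattern object whose earlier setup is truncated. A sketch of how such a helper might look end to end, assuming Encog 3.x's FeedForwardPattern (the surrounding class and parameter names are illustrative):

import org.encog.engine.network.activation.ActivationSigmoid;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.pattern.FeedForwardPattern;

public class GenerateFeedForward {
    // Build a feedforward network with up to two optional hidden layers.
    public static BasicNetwork generate(int input, int hidden1, int hidden2, int output) {
        FeedForwardPattern pattern = new FeedForwardPattern();
        pattern.setInputNeurons(input);
        pattern.setActivationFunction(new ActivationSigmoid());
        if (hidden1 > 0) {
            pattern.addHiddenLayer(hidden1);
        }
        if (hidden2 > 0) {
            pattern.addHiddenLayer(hidden2);
        }
        pattern.setOutputNeurons(output);
        BasicNetwork network = (BasicNetwork) pattern.generate();
        network.reset(); // randomize the generated weights
        return network;
    }
}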

    double outputLow = -1;
    double outputHigh = 1;

    // Create a basic neural network, just to examine activation functions.
    MLMethodFactory methodFactory = new MLMethodFactory();
    BasicNetwork network = (BasicNetwork) methodFactory.create(getMethodName(), architecture, 1, 1);

    if (network.getLayerCount() < 1) {
      throw new EncogError("Neural network does not have an output layer.");
    }

    // Probe the output layer's activation function with large negative inputs
    // to learn whether its output range is bounded below by zero.
    ActivationFunction outputFunction = network.getActivation(network.getLayerCount() - 1);

    double[] d = { -1000, -100, -50 };
    outputFunction.activationFunction(d, 0, d.length); // applied in place

    if (d[0] > 0 && d[1] > 0 && d[2] > 0) {
      // ... (truncated in the original)
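
The probe above feeds large negative values through the output activation: if every result stays positive, the function is bounded below by zero (sigmoid-like) rather than symmetric about zero (tanh-like). A standalone sketch of the same idea, assuming Encog 3.x (ActivationRangeProbe is an illustrative name):

import org.encog.engine.network.activation.ActivationFunction;
import org.encog.engine.network.activation.ActivationSigmoid;
import org.encog.engine.network.activation.ActivationTANH;

public class ActivationRangeProbe {
    public static void main(String[] args) {
        probe(new ActivationSigmoid()); // expected: bounded below by 0
        probe(new ActivationTANH());    // expected: goes negative
    }

    static void probe(ActivationFunction fn) {
        double[] d = { -1000, -100, -50 };
        fn.activationFunction(d, 0, d.length); // applied in place
        boolean positive = d[0] > 0 && d[1] > 0 && d[2] > 0;
        System.out.println(fn.getClass().getSimpleName()
                + (positive ? ": output bounded below by 0" : ": output can go negative"));
    }
}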

  /**
   * Clone the neural network.
   *
   * @return A cloned copy of the neural network.
   */
  @Override
  public Object clone() {
    final BasicNetwork result = (BasicNetwork) ObjectCloner.deepCopy(this);
    return result;
  }
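
Because ObjectCloner.deepCopy produces a fully independent copy, a caller can snapshot a network before further training mutates its weights. A hypothetical usage (Snapshot is an illustrative name):

import org.encog.neural.networks.BasicNetwork;

public class Snapshot {
    // Keep an independent copy of the best candidate so that continued
    // training of the original cannot disturb it.
    public static BasicNetwork of(BasicNetwork candidate) {
        return (BasicNetwork) candidate.clone();
    }
}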

  /**
   * Perform an individual job unit: train a single candidate network and
   * record the best error it achieves.
   *
   * @param context
   *            Contains information about the job unit.
   */
  @Override
  public void performJobUnit(final JobUnitContext context) {

    final BasicNetwork network = (BasicNetwork) context.getJobUnit();
    BufferedMLDataSet buffer = null;
    MLDataSet useTraining = this.training;

    if (this.training instanceof BufferedMLDataSet) {
      buffer = (BufferedMLDataSet) this.training;
      useTraining = buffer.openAdditional();
    }

    // Train the neural network: try several random weight initializations
    // and keep the lowest error achieved.
    double error = Double.POSITIVE_INFINITY;
    for (int z = 0; z < this.weightTries; z++) {
      network.reset(); // re-randomize the weights for this attempt
      final Propagation train = new ResilientPropagation(network,
          useTraining);
      // Stop early if the error improves by less than 0.001 over
      // 5 consecutive iterations.
      final StopTrainingStrategy strat = new StopTrainingStrategy(0.001,
          5);

      train.addStrategy(strat);
      train.setThreadCount(1); // force single-threaded mode

      for (int i = 0; (i < this.iterations) && !getShouldStop()
          && !strat.shouldStop(); i++) {
        train.iteration();
      }

      error = Math.min(error, train.getError());
    }

    if (buffer != null) {
      buffer.close();
    }

    if (!getShouldStop()) {
      // update the observed error range (min and max)

      this.high = Math.max(this.high, error);
      this.low = Math.min(this.low, error);

      if (this.hidden1Size > 0) {
        int networkHidden1Count;
        int networkHidden2Count;

        // More than three layers (input, hidden, output) means a second
        // hidden layer is present.
        if (network.getLayerCount() > 3) {
          networkHidden2Count = network.getLayerNeuronCount(2);
          networkHidden1Count = network.getLayerNeuronCount(1);
        } else {
          networkHidden2Count = 0;
          networkHidden1Count = network.getLayerNeuronCount(1);
        }

        int row, col;

        if (this.hidden2Size == 0) {
          // ... (truncated in the original)
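
A note on the buffer handling above: each job calls openAdditional() to obtain its own view of the shared on-disk training data, so concurrent jobs never fight over a single read position. A minimal sketch (SharedTrainingData is an illustrative name):

import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.buffer.BufferedMLDataSet;

public class SharedTrainingData {
    // Each worker gets its own cursor into the same .egb file.
    public static MLDataSet viewFor(BufferedMLDataSet shared) {
        return shared.openAdditional();
    }
}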

  public Object requestNextTask() {
    if (this.done || getShouldStop()) {
      return null;
    }

    // Generate the next candidate network, then advance the hidden-layer
    // counters; once they can no longer be increased, the search is done.
    final BasicNetwork network = generateNetwork();

    if (!increaseHiddenCounts()) {
      this.done = true;
    }

    return network;
  }

    }

    // Now select the best network: of the top networks, prefer the
    // simplest one.

    BasicNetwork choice = null;

    for (final BasicNetwork n : this.topNetworks) {
      if (n == null) {
        continue;
      }

      if (choice == null) {
        choice = n;
      } else {
        if (n.getStructure().calculateSize() < choice.getStructure()
            .calculateSize()) {
          choice = n;
        }
      }
    }
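
Here "simplest" is decided by getStructure().calculateSize(), which reports the size of the network's encoded weight structure, so among the retained top networks the loop keeps the one with the fewest parameters.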

  /**
   * Generate the Elman neural network.
   */
  @Override
  public MLMethod generate() {
    BasicLayer hidden, input;

    final BasicNetwork network = new BasicNetwork();
    network.addLayer(input = new BasicLayer(this.activation, true,
        this.inputNeurons));
    network.addLayer(hidden = new BasicLayer(this.activation, true,
        this.hiddenNeurons));
    network.addLayer(new BasicLayer(null, false, this.outputNeurons));
    // Elman recurrence: the input layer receives a context copy of the
    // hidden layer's previous output.
    input.setContextFedBy(hidden);
    network.getStructure().finalizeStructure();
    network.reset();
    return network;
  }

  @Override
  public MLMethod generate() {

    BasicLayer hidden, output;

    final BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(null, true,
        this.inputNeurons));
    network.addLayer(hidden = new BasicLayer(this.activation, true,
        this.hiddenNeurons));
    network.addLayer(output = new BasicLayer(this.activation, false,
        this.outputNeurons));
    // Jordan recurrence: the hidden layer receives a context copy of the
    // output layer's previous output.
    hidden.setContextFedBy(output);
    network.getStructure().finalizeStructure();
    network.reset();
    return network;
  }
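
The two generate() methods above build Elman and Jordan networks respectively: an Elman network feeds the hidden layer's previous output back in alongside the input, while a Jordan network feeds the output layer's previous output back into the hidden layer. Both are normally driven through Encog's pattern classes; a sketch assuming Encog 3.x (RecurrentPatterns is an illustrative name):

import org.encog.engine.network.activation.ActivationTANH;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.pattern.ElmanPattern;
import org.encog.neural.pattern.JordanPattern;

public class RecurrentPatterns {
    public static void main(String[] args) {
        ElmanPattern elman = new ElmanPattern();
        elman.setActivationFunction(new ActivationTANH());
        elman.setInputNeurons(1);
        elman.addHiddenLayer(6);
        elman.setOutputNeurons(1);
        BasicNetwork elmanNet = (BasicNetwork) elman.generate();

        JordanPattern jordan = new JordanPattern();
        jordan.setActivationFunction(new ActivationTANH());
        jordan.setInputNeurons(1);
        jordan.addHiddenLayer(6);
        jordan.setOutputNeurons(1);
        BasicNetwork jordanNet = (BasicNetwork) jordan.generate();

        System.out.println("Elman layers: " + elmanNet.getLayerCount());
        System.out.println("Jordan layers: " + jordanNet.getLayerCount());
    }
}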

  /**
   * Randomize the specified machine-learning method.
   */
  @Override
  public void randomize(final MLMethod method) {

    if (method instanceof BasicNetwork) {
      final BasicNetwork network = (BasicNetwork) method;
      // Randomize the weights feeding out of every layer except the last.
      for (int i = 0; i < network.getLayerCount() - 1; i++) {
        randomize(network, i);
      }
    } else if (method instanceof MLEncodable) {
      // Anything else that can encode itself as a flat array is randomized
      // through that array.
      final MLEncodable encode = (MLEncodable) method;
      final double[] encoded = new double[encode.encodedArrayLength()];
      // ... (truncated in the original)
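
The truncated MLEncodable branch works on the method's flat parameter array. A sketch of the general encode / modify / decode round trip, assuming Encog 3.x's MLEncodable interface (FlatRandomize is illustrative, and the uniform randomization stands in for whatever the real randomizer applies):

import org.encog.ml.MLEncodable;

public class FlatRandomize {
    // Randomize any MLEncodable by round-tripping its parameters
    // through a flat array.
    public static void randomize(MLEncodable encode, double range) {
        double[] encoded = new double[encode.encodedArrayLength()];
        encode.encodeToArray(encoded);
        for (int i = 0; i < encoded.length; i++) {
            encoded[i] = (Math.random() * 2.0 - 1.0) * range;
        }
        encode.decodeFromArray(encoded);
    }
}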
