Package: org.encog.neural.networks

Examples of org.encog.neural.networks.BasicNetwork


public class MouseFactory {
 
  public static NeuralMouse generateMouse(Maze maze)
  {
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(Constants.INPUT_NEURON_COUNT));
    network.addLayer(new BasicLayer(60));
    //network.addLayer(new BasicLayer(30));
    network.addLayer(new BasicLayer(Constants.OUTPUT_NEURON_COUNT));
    network.getStructure().finalizeStructure();
    network.reset();
   
    NeuralMouse mouse = new NeuralMouse(network,maze);
    return mouse;
  }
View Full Code Here


    temp.analyze(this.normalizedSunspots);
    return temp.process(this.normalizedSunspots);
  }

  public BasicNetwork createNetwork() {
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(WINDOW_SIZE));
    network.addLayer(new BasicLayer(10));
    network.addLayer(new BasicLayer(1));
    network.getStructure().finalizeStructure();
    network.reset();
    return network;
  }
View Full Code Here

    }
  }

  public void run() {
    normalizeSunspots(0.1, 0.9);
    BasicNetwork network = createNetwork();
    MLDataSet training = generateTraining();
    train(network, training);
    predict(network);

  }
View Full Code Here

 
  public static void main(String[] args)
  {
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
    BasicNetwork network = EncogUtility.simpleFeedForward(2, 4, 0, 1, false);
    ResilientPropagation train = new ResilientPropagation(network, trainingSet);
    train.addStrategy(new RequiredImprovementStrategy(5));
   
    System.out.println("Perform initial train.");
    EncogUtility.trainToError(train,0.01);
View Full Code Here

  // Milliseconds per second, used for timing conversions.
  // NOTE(review): name is a misspelling of MILLIS; left as-is because it is public API.
  public static final int MILIS = 1000;

 
  public static int evaluateTrain(int input, int hidden1, int hidden2,
      int output) {
    final BasicNetwork network = EncogUtility.simpleFeedForward(input,
        hidden1, hidden2, output, true);
    final MLDataSet training = RandomTrainingFactory.generate(1000,
        10000, input, output, -1, 1);
 
   
View Full Code Here

    }
   
  }

  public static FlatNetwork createNetwork() {
    BasicNetwork network = EncogUtility
        .simpleFeedForward(2, 4, 0, 1, false);
    Randomizer randomizer = new ConsistentRandomizer(-1, 1);
    randomizer.randomize(network);
    return network.getStructure().getFlat().clone();
  }
View Full Code Here

   * @param args No arguments are used.
   */
  public static void main(final String args[]) {
   
    // create a neural network, without using a factory
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(null,true,2));
    network.addLayer(new BasicLayer(new ActivationSigmoid(),true,3));
    network.addLayer(new BasicLayer(new ActivationSigmoid(),false,1));
    network.getStructure().finalizeStructure();
    network.reset();

    // create training data
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
   
    // train the neural network
    final ResilientPropagation train = new ResilientPropagation(network, trainingSet);

    int epoch = 1;

    do {
      train.iteration();
      System.out
          .println("Epoch #" + epoch + " Error:" + train.getError());
      epoch++;
    } while(train.getError() > 0.01);

    // test the neural network
    System.out.println("Neural Network Results:");
    for(MLDataPair pair: trainingSet ) {
      final MLData output = network.compute(pair.getInput());
      System.out.println(pair.getInput().getData(0) + "," + pair.getInput().getData(1)
          + ", actual=" + output.getData(0) + ",ideal=" + pair.getIdeal().getData(0));
    }
  }
View Full Code Here

    GaussianRandomizer gaussianRandom = new GaussianRandomizer(0, 1);

    System.out.println("Error improvement, higher is better.");
    BasicMLDataSet training = new BasicMLDataSet(XOR_INPUT,
        XOR_IDEAL);
    BasicNetwork network = EncogUtility.simpleFeedForward(2, 10, 0, 1, true);

    System.out.println("Range random: "
        + evaluateRandomizer(rangeRandom, network, training));
    System.out.println("Nguyen-Widrow: "
        + evaluateRandomizer(nwrRandom, network, training));
View Full Code Here

  // Number of output neurons for the benchmark network.
  public static final int OUTPUT_COUNT = 20;
 
  public static void perform(int thread)
  {
    long start = System.currentTimeMillis();
    final BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(MultiBench.INPUT_COUNT));
    network.addLayer(new BasicLayer(MultiBench.HIDDEN_COUNT));
    network.addLayer(new BasicLayer(MultiBench.OUTPUT_COUNT));
    network.getStructure().finalizeStructure();
    network.reset();
   
    final MLDataSet training = RandomTrainingFactory.generate(1000,50000,
        INPUT_COUNT, OUTPUT_COUNT, -1, 1);
   
    ResilientPropagation rprop = new ResilientPropagation(network,training);
View Full Code Here

  // Expected XOR outputs, row-aligned with XOR_INPUT: 0^0=0, 0^1=1, 1^0=1, 1^1=0.
  public static double XOR_IDEAL[][] = { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } };

  public void trainAndSave() {
    System.out.println("Training XOR network to under 1% error rate.");
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(2));
    network.addLayer(new BasicLayer(2));
    network.addLayer(new BasicLayer(1));
    network.getStructure().finalizeStructure();
    network.reset();

    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

    // train the neural network
    final MLTrain train = new ResilientPropagation(network, trainingSet);

    do {
      train.iteration();
    } while (train.getError() > 0.009);

    double e = network.calculateError(trainingSet);
    System.out.println("Network traiined to error: " + e);

    System.out.println("Saving network");
    EncogDirectoryPersistence.saveObject(new File(FILENAME), network);
  }
View Full Code Here

TOP

Related Classes of org.encog.neural.networks.BasicNetwork

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.