Package org.encog.neural.neat

Examples of org.encog.neural.neat.NEATPopulation.reset()


  public static void main(final String[] args) {

    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
    NEATPopulation pop = new NEATPopulation(2, 1, 1000);
    pop.setInitialConnectionDensity(1.0); // not required, but speeds training
    pop.reset();

    CalculateScore score = new TrainingSetScore(trainingSet);
    // train the neural network
   
    final EvolutionaryAlgorithm train = NEATUtil.constructNEATTrainer(pop, score);
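The excerpt stops right after the trainer is constructed. A minimal sketch of how such an example typically continues, assuming the usual Encog training loop, an error target of 0.01, and that org.encog.neural.neat.NEATNetwork and org.encog.Encog are imported (none of this is shown in the excerpt):

    int epoch = 1;
    do {
      train.iteration();
      System.out.println("Epoch #" + epoch + " Error: " + train.getError());
      epoch++;
    } while (train.getError() > 0.01);

    // decode the best genome into a usable network; reset() above is what
    // seeded the random genomes this evolution started from
    NEATNetwork network = (NEATNetwork) train.getCODEC().decode(train.getBestGenome());
    Encog.getInstance().shutdown();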


   
    ActivationFunction af = this.factory.create(
        holder.getString(MLMethodFactory.PROPERTY_AF, false, MLActivationFactory.AF_SSIGMOID));

    NEATPopulation pop = new NEATPopulation(input, output, populationSize);
    pop.reset();
    pop.setActivationCycles(cycles);
    pop.setNEATActivationFunction(af);

    return pop;
  }
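This factory method appears to be the one behind the NEAT type in MLMethodFactory. A minimal usage sketch under that assumption; the empty architecture string (so the defaults shown above apply) and the resulting defaults are assumptions, not shown in this excerpt:

    MLMethodFactory methodFactory = new MLMethodFactory();
    // 2 inputs, 1 output; architecture string left empty so the factory's defaults are used (assumption)
    NEATPopulation pop = (NEATPopulation) methodFactory.create(MLMethodFactory.TYPE_NEAT, "", 2, 1);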

    }
    buffer.endLoad();
   
    NEATPopulation pop = new NEATPopulation(2, 1, 1000);
    pop.setInitialConnectionDensity(1.0); // not required, but speeds training
    pop.reset();

    CalculateScore score = new TrainingSetScore(buffer);
    // train the neural network
   
    final EvolutionaryAlgorithm train = NEATUtil.constructNEATTrainer(pop, score);
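The lines cut off above this excerpt presumably fill the buffer that endLoad() closes. A minimal sketch of that setup, assuming a disk-backed org.encog.ml.data.buffer.BufferedMLDataSet holding the same XOR_INPUT/XOR_IDEAL arrays used in the other examples; the file name is hypothetical:

    BufferedMLDataSet buffer = new BufferedMLDataSet(new File("xor.egb")); // hypothetical file
    buffer.beginLoad(2, 1); // 2 input columns, 1 ideal column
    for (int i = 0; i < XOR_INPUT.length; i++) {
      buffer.add(new BasicMLData(XOR_INPUT[i]), new BasicMLData(XOR_IDEAL[i]));
    } // the excerpt above picks up here with buffer.endLoad()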

 
  public void testNEAT() {
    MLDataSet trainingSet = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);
    NEATPopulation pop = new NEATPopulation(2, 1, 1000);
    pop.setInitialConnectionDensity(1.0); // not required, but speeds training
    pop.reset();

    CalculateScore score = new TrainingSetScore(trainingSet);
    // train the neural network
   
    final EvolutionaryAlgorithm train = NEATUtil.constructNEATTrainer(pop, score);

  {
    final CalculateScore score = new TrainingSetScore(new BasicMLDataSet(FAKE_DATA, FAKE_DATA));

    // create a new random population and train it
    NEATPopulation pop = new NEATPopulation(FAKE_DATA[0].length, 1, 50);
    pop.reset();
    EvolutionaryAlgorithm training1 = NEATUtil.constructNEATTrainer(pop, score);
    training1.iteration();
    // enough training for now, backup current population to continue later
    final ByteArrayOutputStream serialized1 = new ByteArrayOutputStream();
    new PersistNEATPopulation().save(serialized1, training1.getPopulation());
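The serialized bytes can later be read back so evolution resumes from the saved population rather than from a fresh reset(). A minimal sketch, assuming the matching PersistNEATPopulation.read(InputStream) round trip and a java.io.ByteArrayInputStream:

    // restore the population; no reset() here, the saved genomes are kept
    NEATPopulation restored = (NEATPopulation) new PersistNEATPopulation()
        .read(new ByteArrayInputStream(serialized1.toByteArray()));

    // wrap it in a new trainer and keep evolving where training1 stopped
    EvolutionaryAlgorithm training2 = NEATUtil.constructNEATTrainer(restored, score);
    training2.iteration();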

  public void testSaveRead() throws Exception
  {
    final CalculateScore score = new TrainingSetScore(new BasicMLDataSet(FAKE_DATA, FAKE_DATA));
    NEATPopulation pop = new NEATPopulation(FAKE_DATA[0].length, 1, 50);
    pop.reset();
    // create a new random population and train it
    EvolutionaryAlgorithm training1 = NEATUtil.constructNEATTrainer(pop, score);
    training1.iteration();
    // enough training for now, backup current population
    final ByteArrayOutputStream serialized1 = new ByteArrayOutputStream();

  @Test
  public void testSort1() {
   
    MLDataSet trainingSet = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);
    NEATPopulation pop = new NEATPopulation(2, 1, 100);
    pop.reset();
    CalculateScore score = new TrainingSetScore(trainingSet);
    final EvolutionaryAlgorithm train = NEATUtil.constructNEATTrainer(pop, score);
       
    NEATGenome genome1 = new NEATGenome();
    genome1.setAdjustedScore(3.0);
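The sort tests compare genomes through the comparators the trainer exposes. A hedged sketch of how such a test might continue; the second genome, its score, and the comparator call are assumptions, since the excerpt ends before them:

    NEATGenome genome2 = new NEATGenome();
    genome2.setAdjustedScore(1.0);

    // the trainer's comparators define the ordering used when genomes are sorted
    int cmp = train.getSelectionComparator().compare(genome1, genome2);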

  @Test
  public void testSort2() {
   
    MLDataSet trainingSet = new BasicMLDataSet(XOR.XOR_INPUT, XOR.XOR_IDEAL);
    NEATPopulation pop = new NEATPopulation(2, 1, 100);
    pop.reset();
    CalculateScore score = new TrainingSetScore(trainingSet);
    final EvolutionaryAlgorithm train = NEATUtil.constructNEATTrainer(pop, score);
       
    NEATGenome genome1 = new NEATGenome();
    genome1.setAdjustedScore(3.0);
