Package com.github.neuralnetworks.training.rbm

Examples of com.github.neuralnetworks.training.rbm.AparapiCDTrainer
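AparapiCDTrainer trains a restricted Boltzmann machine (RBM) with contrastive divergence (CD), running its update kernels through Aparapi so they can execute on the GPU or fall back to CPU or sequential mode. The snippets below are excerpts from the library's factory methods and tests, and each one starts after the RBM itself has been constructed. For orientation, here is a minimal sketch of the typical flow assembled from those excerpts; the NNFactory.rbm(visibleUnits, hiddenUnits, addBias) call is an assumption, since none of the excerpts shows the network construction.

  // Minimal sketch assembled from the excerpts below. NNFactory.rbm(...) is an
  // assumed construction call; the excerpts start after the RBM already exists.
  RBM rbm = NNFactory.rbm(6, 2, true); // 6 visible units, 2 hidden units, bias connections

  // unsupervised CD phase: 6-bit input patterns, no targets
  TrainingInputProvider train = new SimpleInputProvider(
      new float[][] { { 1, 1, 1, 0, 0, 0 }, { 0, 0, 0, 1, 1, 1 } }, null, 1000, 1);

  // trailing arguments, named after the rbmProperties call in the first excerpt:
  // learning rate, momentum, L1 weight decay, L2 weight decay, Gibbs sampling steps, persistent CD
  AparapiCDTrainer trainer = TrainerFactory.cdSigmoidTrainer(rbm, train, null, null,
      new NNRandomInitializer(new MersenneTwisterRandomInitializer(-0.01f, 0.01f)),
      0.02f, 0.5f, 0f, 0f, 1, false);
  trainer.setLayerCalculator(NNFactory.rbmSigmoidSigmoid(rbm));
  trainer.train();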


  RBMLayerCalculator lc = NNFactory.rbmSigmoidSigmoid(rbm);
  ConnectionCalculatorFullyConnected cc = (ConnectionCalculatorFullyConnected) lc.getConnectionCalculator(rbm.getInputLayer());
  cc.addPreTransferFunction(new BernoulliDistribution());

  return new AparapiCDTrainer(rbmProperties(rbm, lc, trainingSet, testingSet, error, rand, learningRate, momentum, l1weightDecay, l2weightDecay, gibbsSampling, isPersistentCD));
    }
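The excerpt above builds the sigmoid-sigmoid layer calculator for the RBM and registers a BernoulliDistribution as a pre-transfer function on the fully connected calculator attached to the input layer, so real-valued probabilities are sampled into binary states during the Gibbs steps, which is the stochastic ingredient of contrastive divergence. The remaining hyperparameters are passed through rbmProperties(...), whose argument list (learning rate, momentum, L1/L2 weight decay, Gibbs sampling steps, persistent-CD flag) names them explicitly. Below is a hedged sketch of mirroring the sampling on the hidden side; treating rbm.getOutputLayer() as the hidden layer is an assumption of this sketch.

  // hedged sketch: apply the same binary sampling to the hidden layer as well.
  // getOutputLayer() standing in for the RBM's hidden layer is an assumption.
  ConnectionCalculatorFullyConnected hiddenCC =
      (ConnectionCalculatorFullyConnected) lc.getConnectionCalculator(rbm.getOutputLayer());
  hiddenCC.addPreTransferFunction(new BernoulliDistribution());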



  TrainingInputProvider trainInputProvider = new SimpleInputProvider(new float[][] { { 1, 1, 1, 0, 0, 0 }, { 1, 0, 1, 0, 0, 0 }, { 1, 1, 0, 0, 0, 0 }, { 0, 1, 1, 0, 0, 0 }, { 0, 1, 1, 1, 0, 0 }, { 0, 0, 0, 1, 1, 1 }, { 0, 0, 1, 1, 1, 0 }, { 0, 0, 0, 1, 0, 1 }, { 0, 0, 0, 0, 1, 1 }, { 0, 0, 0, 1, 1, 0 } }, null, 1000, 1);
  TrainingInputProvider testInputProvider = new SimpleInputProvider(new float[][] { { 1, 1, 1, 0, 0, 0 }, { 1, 0, 1, 0, 0, 0 }, { 1, 1, 0, 0, 0, 0 }, { 0, 1, 1, 0, 0, 0 }, { 0, 1, 1, 1, 0, 0 }, { 0, 0, 0, 1, 1, 1 }, { 0, 0, 1, 1, 1, 0 }, { 0, 0, 0, 1, 0, 1 }, { 0, 0, 0, 0, 1, 1 }, { 0, 0, 0, 1, 1, 0 } }, new float[][] { { 1, 0 }, { 1, 0 }, { 1, 0 }, { 1, 0 }, { 1, 0 }, { 0, 1 }, { 0, 1 }, { 0, 1 }, { 0, 1 }, { 0, 1 } }, 10, 1);
  MultipleNeuronsOutputError error = new MultipleNeuronsOutputError();

  // Contrastive divergence training
  AparapiCDTrainer t = TrainerFactory.cdSigmoidTrainer(rbm, trainInputProvider, testInputProvider, error, new NNRandomInitializer(new MersenneTwisterRandomInitializer(-0.01f, 0.01f)), 0.02f, 0.5f, 0f, 0f, 1, false);
  t.setLayerCalculator(NNFactory.rbmSigmoidSigmoid(rbm));

  // log data
  t.addEventListener(new LogTrainingListener(Thread.currentThread().getStackTrace()[1].getMethodName(), true, false));

  // training
  t.train();

  // testing
  t.test();

  assertEquals(0, t.getOutputError().getTotalNetworkError(), 0);
    }
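The test above serves ten 6-bit patterns through a SimpleInputProvider for the unsupervised training phase and then, via MultipleNeuronsOutputError, checks that the hidden activations separate the first five patterns (target { 1, 0 }) from the last five (target { 0, 1 }), expecting zero error. The factory call runs plain CD-1 without persistent chains; switching to CD-k or to persistent CD only changes the two trailing arguments, as in this hedged sketch:

  // hedged sketch: same setup, but 5 Gibbs sampling steps and persistent chains (PCD)
  AparapiCDTrainer cdk = TrainerFactory.cdSigmoidTrainer(rbm, trainInputProvider, testInputProvider, error,
      new NNRandomInitializer(new MersenneTwisterRandomInitializer(-0.01f, 0.01f)),
      0.02f, 0.5f, 0f, 0f, 5 /* Gibbs steps */, true /* persistent CD */);
  cdk.setLayerCalculator(NNFactory.rbmSigmoidSigmoid(rbm));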

  TrainingInputProvider trainInputProvider = new SimpleInputProvider(new float[][] { { 1, 1, 1, 0, 0, 0 }, { 1, 0, 1, 0, 0, 0 }, { 1, 1, 0, 0, 0, 0 }, { 0, 1, 1, 0, 0, 0 }, { 0, 1, 1, 1, 0, 0 }, { 0, 0, 0, 1, 1, 1 }, { 0, 0, 1, 1, 1, 0 }, { 0, 0, 0, 1, 0, 1 }, { 0, 0, 0, 0, 1, 1 }, { 0, 0, 0, 1, 1, 0 } }, null, 1000, 1);
  TrainingInputProvider testInputProvider = new SimpleInputProvider(new float[][] { { 1, 1, 1, 0, 0, 0 }, { 1, 0, 1, 0, 0, 0 }, { 1, 1, 0, 0, 0, 0 }, { 0, 1, 1, 0, 0, 0 }, { 0, 1, 1, 1, 0, 0 }, { 0, 0, 0, 1, 1, 1 }, { 0, 0, 1, 1, 1, 0 }, { 0, 0, 0, 1, 0, 1 }, { 0, 0, 0, 0, 1, 1 }, { 0, 0, 0, 1, 1, 0 } }, new float[][] { { 1, 0 }, { 1, 0 }, { 1, 0 }, { 1, 0 }, { 1, 0 }, { 0, 1 }, { 0, 1 }, { 0, 1 }, { 0, 1 }, { 0, 1 } }, 10, 1);
  MultipleNeuronsOutputError error = new MultipleNeuronsOutputError();

  // Persistent contrastive divergence trainer
  AparapiCDTrainer t = TrainerFactory.cdSigmoidTrainer(rbm, trainInputProvider, testInputProvider, error, new NNRandomInitializer(new MersenneTwisterRandomInitializer(-0.01f, 0.01f)), 0.02f, 0.5f, 0f, 0f, 1, true);
  t.setLayerCalculator(NNFactory.rbmSigmoidSigmoid(rbm));

  // log data
  t.addEventListener(new LogTrainingListener(Thread.currentThread().getStackTrace()[1].getMethodName(), true, false));

  Environment.getInstance().setExecutionMode(EXECUTION_MODE.SEQ);

  // training
  t.train();

  // testing
  t.test();
 
  assertEquals(0, t.getOutputError().getTotalNetworkError(), 0);
    }
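This is the persistent-CD variant of the previous test; the functional differences are the final true argument and the explicit execution mode. Environment.getInstance().setExecutionMode(EXECUTION_MODE.SEQ) makes the Aparapi kernels run sequentially in the JVM, which keeps the test free of any GPU dependency; the MNIST excerpt further down uses EXECUTION_MODE.CPU instead. GPU execution is also a standard Aparapi mode, although it does not appear in these excerpts; a one-line hedged sketch:

  // hedged sketch: ask Aparapi to run the CD kernels on the GPU when one is available
  Environment.getInstance().setExecutionMode(EXECUTION_MODE.GPU);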

  Matrix cgb2 = rbm.getHiddenBiasConnections().getConnectionGraph();
  cgb2.set(-0.4f, 0, 0);
  cgb2.set(0.2f, 1, 0);

  AparapiCDTrainer t = TrainerFactory.cdSigmoidTrainer(rbm, new SimpleInputProvider(new float[][] { { 1, 0, 1 } }, null, 1, 1), null, null, null, 1f, 0f, 0f, 0f, 1, true);
  t.setLayerCalculator(NNFactory.rbmSigmoidSigmoid(rbm));

  Environment.getInstance().setExecutionMode(EXECUTION_MODE.SEQ);

  t.train();

  assertEquals(0.52276707, cgb1.get(0, 0), 0.00001);
  assertEquals(-0.54617375, cgb1.get(1, 0), 0.00001);
  assertEquals(0.51522285, cgb1.get(2, 0), 0.00001);
 
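This excerpt comes from a value-level test: the hidden bias connection graph is seeded with known values, the trainer runs persistent CD over the single example { 1, 0, 1 } with learning rate 1 and no momentum or weight decay, and the asserts then check the resulting bias values to several decimal places. The cgb1 matrix in the asserts is defined in the part of the test not shown here, presumably mirroring cgb2 but taken from the visible bias connections:

  // hedged sketch of the line the excerpt omits: the visible bias connection graph
  Matrix cgb1 = rbm.getVisibleBiasConnections().getConnectionGraph();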

  TrainingInputProvider trainInputProvider = new IrisInputProvider(1, 150000, new IrisTargetMultiNeuronOutputConverter(), false, true, false);
  TrainingInputProvider testInputProvider = new IrisInputProvider(1, 150, new IrisTargetMultiNeuronOutputConverter(), false, true, false);
  MultipleNeuronsOutputError error = new MultipleNeuronsOutputError();

  // trainers
  AparapiCDTrainer t = TrainerFactory.cdSigmoidTrainer(rbm, trainInputProvider, testInputProvider, error, new NNRandomInitializer(new MersenneTwisterRandomInitializer(-0.01f, 0.01f)), 0.01f, 0.5f, 0f, 0f, 1, true);

  // log data
  t.addEventListener(new LogTrainingListener(Thread.currentThread().getStackTrace()[1].getMethodName()));

  // execution mode
  Environment.getInstance().setExecutionMode(EXECUTION_MODE.SEQ);

  // training
  t.train();

  // testing
  t.test();

  // two of the Iris classes are not linearly separable, so a total network error of up to 1/3 is tolerated
  assertEquals(0, t.getOutputError().getTotalNetworkError(), 1/3f);
    }
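Here the RBM is trained and evaluated on the Iris data set; the final assert tolerates a total error of up to 1/3 because two of the three Iris classes are not linearly separable. As before, the excerpt starts after the network has been created; a hedged guess at that construction, purely for orientation:

  // hedged sketch, not part of the excerpt: an RBM over the four Iris features,
  // with one hidden unit per class. Both layer sizes are assumptions for illustration.
  RBM rbm = NNFactory.rbm(4, 3, true);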

  TrainingInputProvider trainInputProvider = new IrisInputProvider(150, 150000, new IrisTargetMultiNeuronOutputConverter(), false, true, false);
  TrainingInputProvider testInputProvider = new IrisInputProvider(1, 150, new IrisTargetMultiNeuronOutputConverter(), false, true, false);
  // rbm trainers for each layer
  AparapiCDTrainer firstTrainer = TrainerFactory.cdSigmoidTrainer(dbn.getFirstNeuralNetwork(), null, null, null, new NNRandomInitializer(new MersenneTwisterRandomInitializer(-0.01f, 0.01f)), 0.01f, 0.5f, 0f, 0f, 1, true);
  AparapiCDTrainer lastTrainer = TrainerFactory.cdSigmoidTrainer(dbn.getLastNeuralNetwork(), null, null, null, new NNRandomInitializer(new MersenneTwisterRandomInitializer(-0.01f, 0.01f)), 0.01f, 0.5f, 0f, 0f, 1, true);

  Map<NeuralNetwork, OneStepTrainer<?>> map = new HashMap<>();
  map.put(dbn.getFirstNeuralNetwork(), firstTrainer);
  map.put(dbn.getLastNeuralNetwork(), lastTrainer);
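This excerpt prepares greedy layer-wise pre-training of a deep belief network: one AparapiCDTrainer per stacked RBM, collected in a map keyed by the sub-network it trains. The full test hands that map to the library's layer-wise DBN trainer, which is outside the excerpt. Below is a hedged sketch of how such a DBN might be created; the NNFactory.dbn(int[], boolean) call and the layer sizes are assumptions.

  // hedged sketch, not part of the excerpt: a DBN with two stacked RBMs.
  // The factory call and the layer sizes are assumptions for illustration.
  DBN dbn = NNFactory.dbn(new int[] { 4, 4, 3 }, true);
  RBM first = dbn.getFirstNeuralNetwork(); // trained by firstTrainer above
  RBM last = dbn.getLastNeuralNetwork();   // trained by lastTrainer above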

  MnistInputProvider trainInputProvider = new MnistInputProvider("train-images.idx3-ubyte", "train-labels.idx1-ubyte", 1, 1, new MnistTargetMultiNeuronOutputConverter());
  trainInputProvider.addInputModifier(new ScalingInputFunction(255));
  MnistInputProvider testInputProvider = new MnistInputProvider("t10k-images.idx3-ubyte", "t10k-labels.idx1-ubyte", 1000, 1, new MnistTargetMultiNeuronOutputConverter());
  testInputProvider.addInputModifier(new ScalingInputFunction(255));

  AparapiCDTrainer t = TrainerFactory.cdSigmoidTrainer(rbm, trainInputProvider, testInputProvider,  new MultipleNeuronsOutputError(), new NNRandomInitializer(new MersenneTwisterRandomInitializer(-0.01f, 0.01f)), 0.01f, 0.5f, 0f, 0f, 1, false);

  t.addEventListener(new LogTrainingListener(Thread.currentThread().getStackTrace()[1].getMethodName(), false, true));
  Environment.getInstance().setExecutionMode(EXECUTION_MODE.CPU);
  t.train();
  t.test();

  assertEquals(0, t.getOutputError().getTotalNetworkError(), 0.1);
    }
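The MNIST excerpt scales the raw 0-255 pixel values into [0, 1] with ScalingInputFunction(255), runs the Aparapi kernels in CPU mode, and accepts up to 10% total error on the test set. As in the other tests, the RBM construction happens outside the excerpt; a hedged guess, for orientation only:

  // hedged sketch, not part of the excerpt: an RBM sized for MNIST.
  // 784 visible units = 28 x 28 pixels; 10 hidden units (one per digit class) is an
  // assumption consistent with the MultipleNeuronsOutputError check above.
  RBM rbm = NNFactory.rbm(784, 10, true);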

  IrisInputProvider testInputProvider = new IrisInputProvider(new IrisTargetMultiNeuronOutputConverter(), false);
  testInputProvider.addInputModifier(new ScalingInputFunction(testInputProvider));
  MultipleNeuronsOutputError error = new MultipleNeuronsOutputError();

  // trainers
  AparapiCDTrainer t = TrainerFactory.cdSigmoidBinaryTrainer(rbm, trainInputProvider, testInputProvider, error, new NNRandomInitializer(new MersenneTwisterRandomInitializer(-0.01f, 0.01f)), 0.01f, 0.5f, 0f, 0f, 1, 1, 100, true);

  // log data
  t.addEventListener(new LogTrainingListener(Thread.currentThread().getStackTrace()[1].getMethodName()));

  // training
  t.train();

  // testing
  t.test();

  // two of the Iris classes are not linearly separable, so a total network error of up to 2/3 is tolerated
  assertEquals(0, t.getOutputError().getTotalNetworkError(), 2/3f);
    }
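cdSigmoidBinaryTrainer is the binary-sampling variant of the factory call: judging by its name and by the first excerpt, which adds a BernoulliDistribution before returning an AparapiCDTrainer, hidden and visible activations are sampled to binary states during the Gibbs steps. Compared with cdSigmoidTrainer it takes two extra integer arguments before the persistent-CD flag (1, 100 here; 150, 1000 in the DBN excerpt below); reading them as mini-batch size and epoch count is an assumption of the annotated sketch below, since the excerpts do not name them.

  // hedged sketch: the call from the excerpt above with its trailing arguments annotated.
  // The labels for the two extra integers (batch size, epochs) are assumptions.
  AparapiCDTrainer binary = TrainerFactory.cdSigmoidBinaryTrainer(rbm, trainInputProvider, testInputProvider, error,
      new NNRandomInitializer(new MersenneTwisterRandomInitializer(-0.01f, 0.01f)),
      0.01f,  // learning rate
      0.5f,   // momentum
      0f, 0f, // L1 / L2 weight decay
      1,      // Gibbs sampling steps
      1,      // mini-batch size (assumed)
      100,    // epochs (assumed)
      true);  // persistent CD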

  IrisInputProvider testInputProvider = new IrisInputProvider(new IrisTargetMultiNeuronOutputConverter(), false);
  testInputProvider.addInputModifier(new ScalingInputFunction(testInputProvider));

  // rbm trainers for each layer
  AparapiCDTrainer firstTrainer = TrainerFactory.cdSigmoidBinaryTrainer(dbn.getFirstNeuralNetwork(), null, null, null, new NNRandomInitializer(new MersenneTwisterRandomInitializer(-0.01f, 0.01f)), 0.01f, 0.5f, 0f, 0f, 1, 150, 1000, true);
  AparapiCDTrainer lastTrainer = TrainerFactory.cdSigmoidBinaryTrainer(dbn.getLastNeuralNetwork(), null, null, null, new NNRandomInitializer(new MersenneTwisterRandomInitializer(-0.01f, 0.01f)), 0.01f, 0.5f, 0f, 0f, 1, 150, 1000, true);

  Map<NeuralNetwork, OneStepTrainer<?>> map = new HashMap<>();
  map.put(dbn.getFirstNeuralNetwork(), firstTrainer);
  map.put(dbn.getLastNeuralNetwork(), lastTrainer);
