Package com.github.neuralnetworks.input

Examples of com.github.neuralnetworks.input.SimpleInputProvider
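
SimpleInputProvider wraps in-memory float arrays as a TrainingInputProvider: the first array holds one input row per sample, the second holds the matching target rows and may be null when no targets are needed. A minimal sketch (the values are invented for illustration):

  // input rows only - passing null for the targets gives an unsupervised provider
  SimpleInputProvider unsupervised = new SimpleInputProvider(new float[][] { { 1, 0, 1 } }, null);
  // input rows paired one-to-one with target rows for supervised training
  SimpleInputProvider supervised = new SimpleInputProvider(new float[][] { { 0, 1 } }, new float[][] { { 1 } });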


  // For example, 0 0 0 1 0 0 means that the patient has a flu vaccine. It's possible
  // to have combinations of both - for example, 0 1 0 1 0 0 means
  // that the patient is vaccinated, but he's also coughing. We will
  // consider a patient to be sick when he has at least two of the first
  // three and healthy when he has at least two of the last three
  TrainingInputProvider trainInputProvider = new SimpleInputProvider(new float[][] {
      { 1, 1, 1, 0, 0, 0 }, { 1, 0, 1, 0, 0, 0 }, { 1, 1, 0, 0, 0, 0 }, { 0, 1, 1, 0, 0, 0 }, { 0, 1, 1, 1, 0, 0 },
      { 0, 0, 0, 1, 1, 1 }, { 0, 0, 1, 1, 1, 0 }, { 0, 0, 0, 1, 0, 1 }, { 0, 0, 0, 0, 1, 1 }, { 0, 0, 0, 1, 1, 0 } }, null);
  TrainingInputProvider testInputProvider = new SimpleInputProvider(new float[][] {
      { 1, 1, 1, 0, 0, 0 }, { 1, 0, 1, 0, 0, 0 }, { 1, 1, 0, 0, 0, 0 }, { 0, 1, 1, 0, 0, 0 }, { 0, 1, 1, 1, 0, 0 },
      { 0, 0, 0, 1, 1, 1 }, { 0, 0, 1, 1, 1, 0 }, { 0, 0, 0, 1, 0, 1 }, { 0, 0, 0, 0, 1, 1 }, { 0, 0, 0, 1, 1, 0 } },
      new float[][] { { 1, 0 }, { 1, 0 }, { 1, 0 }, { 1, 0 }, { 1, 0 }, { 0, 1 }, { 0, 1 }, { 0, 1 }, { 0, 1 }, { 0, 1 } });
  MultipleNeuronsOutputError error = new MultipleNeuronsOutputError();

  // backpropagation for autoencoders
  BackPropagationAutoencoder t = TrainerFactory.backPropagationAutoencoder(ae, trainInputProvider, testInputProvider, error, new NNRandomInitializer(new MersenneTwisterRandomInitializer(-0.01f, 0.01f)), 0.02f, 0.7f, 0f, 0f, 0f, 1, 1, 100);
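
The snippet stops at the trainer construction; the natural next step, as the other snippets on this page show, is to run training and then evaluate against the labelled test set:

  // run backpropagation over the training set, then score the test set with MultipleNeuronsOutputError
  t.train();
  t.test();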


  Environment.getInstance().setUseWeightsSharedMemory(false);
  Environment.getInstance().setUseDataSharedMemory(false);
  NeuralNetworkImpl mlp = NNFactory.mlpSigmoid(new int[] { 2, 2, 1 }, true);

  // create the input provider (the same SimpleInputProvider is used for training and testing)
  SimpleInputProvider input = new SimpleInputProvider(new float[][] { {0, 0}, {0, 1}, {1, 0}, {1, 1} }, new float[][] { {0}, {1}, {1}, {0} });

  // create backpropagation trainer for the network
  BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(mlp, input, input, new XorOutputError(), new NNRandomInitializer(new MersenneTwisterRandomInitializer(-0.01f, 0.01f)), 0.1f, 0.9f, 0f, 0f, 0f, 1, 1, 100000);

  // add logging
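  // the snippet is truncated here; an assumed continuation (LogTrainingListener and the
  // train/test calls are taken from this library's usual examples, not from this page):
  bpt.addEventListener(new LogTrainingListener(Thread.currentThread().getStackTrace()[1].getMethodName()));
  bpt.train();
  bpt.test();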

  FullyConnected b2 = (FullyConnected) fc2.getOutputLayer().getConnections().get(1);
  b2.getWeights().set(-6.1552725f, 0, 0);

  // create the input provider (the same SimpleInputProvider is used for training and testing)
  SimpleInputProvider input = new SimpleInputProvider(new float[][] { {0, 0}, {0, 1}, {1, 0}, {1, 1} }, new float[][] { {0}, {1}, {1}, {0} });

  // create backpropagation trainer for the network
  BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(mlp, input, input, new XorOutputError(), null, 1f, 0.5f, 0f, 0f, 0f, 1, 1, 5000);

  // add logging

  FullyConnected mlpfco = (FullyConnected) mlp.getOutputLayer().getConnections().get(0);
  mlpfco.getWeights().set(0.05f, 0, 0);
  mlpfco.getWeights().set(0.08f, 0, 1);

  // compare bp
  SimpleInputProvider inputProvider = new SimpleInputProvider(new float[][] { {0, 0}, {0, 1}, {1, 0}, {1, 1} }, new float[][] { {0}, {1}, {1}, {0} });

  BackPropagationTrainer<?> mlpbpt = TrainerFactory.backPropagation(mlp, inputProvider, inputProvider, new XorOutputError(), null, 1f, 0f, 0f, 0f, 0f, 1, 1, 10000);
  mlpbpt.train();
  mlpbpt.test();
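
After test() the trainer's aggregated error can be inspected; a hedged follow-up, assuming the getOutputError()/getTotalNetworkError() accessors shown in this library's examples:

  // assumed check - a fully trained XOR network should produce (close to) zero classification error
  assertEquals(0, mlpbpt.getOutputError().getTotalNetworkError(), 0.1);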

  FullyConnected c2 = (FullyConnected) mlp.getOutputLayer().getConnections().iterator().next();
  Matrix cg2 = c2.getWeights();
  cg2.set(0.3f, 0, 0);
  cg2.set(0.9f, 0, 1);

  BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(mlp, new SimpleInputProvider(new float[][] { { 0.35f, 0.9f } }, new float[][] { { 0.5f } }), new SimpleInputProvider(new float[][] { { 0.35f, 0.9f } }, new float[][] { { 0.5f } }), null, null, 1f, 0f, 0f, 0f, 0f, 1, 1, 1);
  bpt.train();
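  // cg1 is a weight matrix initialized earlier in the full test; after this single pass over the
  // one-sample provider, the assertions below compare it against hand-computed backpropagation updates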

  assertEquals(0.09916, cg1.get(0, 0), 0.01);
  assertEquals(0.7978, cg1.get(0, 1), 0.01);
  assertEquals(0.3972, cg1.get(1, 0), 0.01);

  FullyConnected cb2 = (FullyConnected) c.get(3);
  Matrix cgb2 = cb2.getWeights();
  cgb2.set(0.1f, 0, 0);

  BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(mlp, new SimpleInputProvider(new float[][] { { 1, 0, 1 } }, new float[][] { { 1 } }), new SimpleInputProvider(new float[][] { { 1, 0, 1 } }, new float[][] { { 1 } }), null, null, 0.9f, 0f, 0f, 0f, 0f, 1, 1, 1);
  bpt.train();

  assertEquals(0.192, cg1.get(0, 0), 0.001);
  assertEquals(0.4, cg1.get(0, 1), 0.001);
  assertEquals(-0.508, cg1.get(0, 2), 0.001);

  FullyConnected cb2 = (FullyConnected) c.get(3);
  Matrix cgb2 = cb2.getWeights();
  cgb2.set(0.1f, 0, 0);

  BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(mlp, new SimpleInputProvider(new float[][] { { 1, 0, 1 } }, new float[][] { { 1 } }), new SimpleInputProvider(new float[][] { { 1, 0, 1 } }, new float[][] { { 1 } }), null, null, 0.9f, 0f, 0f, 0f, 0.01f, 1, 1, 1);
  bpt.train();

  assertEquals(0.192, cg1.get(0, 0), 0.001);
  assertEquals(0.4, cg1.get(0, 1), 0.001);
  assertEquals(-0.508, cg1.get(0, 2), 0.001);

  in.set(8, 0, 0);
  in.set(2, 1, 0);
  in.set(1, 0, 1);
  in.set(7, 1, 1);

  BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(nn, new SimpleInputProvider(new float[][] { { 8, 2 } }, new float[][] { { 1 } }), null, null, null, 0.9f, 0f, 0f, 0f, 0f, 1, 1, 1);
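  // note: the testing provider, output error and random initializer are all passed as null - the test
  // only verifies the weight values below after one pass over the single { 8, 2 } sample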
  bpt.train();

  assertEquals(0.1f, cg1.get(0, 0), 0f);
  assertEquals(0.5198f, cg1.get(0, 1), 0f);
  assertEquals(0.1f, cg1.get(1, 0), 0f);

  FullyConnected cb2 = (FullyConnected) c.get(3);
  Matrix cgb2 = cb2.getWeights();
  cgb2.set(0.1f, 0, 0);

  BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(mlp, new SimpleInputProvider(new float[][] { { 1, 0, 1 }, { 1, 1, 0 } }, new float[][] { { 1 }, { 1 } }), null, null, null, 0.9f, 0f, 0f, 0f, 0f, 1, 1, 1);
  bpt.train();

  assertEquals(0.1849, cg1.get(0, 0), 0.0001);
  assertEquals(0.3927, cg1.get(0, 1), 0.0001);
  assertEquals(-0.508, cg1.get(0, 2), 0.001);

  Matrix cgb2 = firstRBM.getHiddenBiasConnections().getWeights();
  cgb2.set(-0.4f, 0, 0);
  cgb2.set(0.2f, 1, 0);

  SimpleInputProvider inputProvider = new SimpleInputProvider(new float[][] { { 1, 0, 1 } }, null);
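  // passing null for the targets makes this provider unsupervised (input only), which is all that
  // contrastive-divergence training of the RBMs requires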

  AparapiCDTrainer firstTrainer = TrainerFactory.cdSigmoidTrainer(firstRBM, null, null, null, null, 1f, 0f, 0f, 0f, 1, 1, 1, true);

  RBM secondRBM = dbn.getLastNeuralNetwork();
