Package: com.github.neuralnetworks.tensor

Examples of com.github.neuralnetworks.tensor.Matrix


  private final int[] maxoutWinners;

  public AparapiBackpropMaxout(Connections inputConnection, ValuesProvider valuesProvider, ValuesProvider activations, List<Tensor> weightUpdates, float learningRate, float momentum, float l1weightDecay, float l2weightDecay) {
      super(Arrays.asList(new Connections[] {inputConnection}), valuesProvider, inputConnection.getOutputLayer());

      Matrix m = TensorFactory.tensor(inputConnection.getInputLayer(), inputConnection, activations);
      this.ffActivation = m.getElements();
      this.activationStartPosition = m.getStartIndex();
      this.activationRowStep = m.getRowElementsDistance();
      this.activationColumnStep = m.getColumnElementsDistance();

      this.learningRate = momentum;
      this.momentum = momentum;
      this.l1weightDecay = l1weightDecay;
      this.l2weightDecay = l2weightDecay;
View Full Code Here


    @Test
    public void testScaling() {
  float[][] input = new float[][] { { 1, 3 }, { -2, 1.5f } };
  ScalingInputFunction si = new ScalingInputFunction(new SimpleInputProvider(input));
  Matrix m = TensorFactory.matrix(input);
  si.value(m);

  assertEquals(0.5f, m.get(0, 0), 0);
  assertEquals(-1f, m.get(0, 1), 0);
  assertEquals(1f, m.get(1, 0), 0);
  assertEquals(0.5f, m.get(1, 1), 0);
    }
View Full Code Here

  NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { {2, 1, 1}, {1, 1}, {2}, {2}, {1} }, false);
  nn.setLayerCalculator(NNFactory.lcSigmoid(nn, null));
  NNFactory.lcMaxPooling(nn);

  FullyConnected c1 = (FullyConnected) nn.getInputLayer().getConnections().get(0).getOutputLayer().getConnections().get(1).getOutputLayer().getConnections().get(1);
  Matrix cg1 = c1.getWeights();
  cg1.set(0.1f, 0, 0);
  cg1.set(0.8f, 0, 1);
  cg1.set(0.4f, 1, 0);
  cg1.set(0.6f, 1, 1);

  FullyConnected c2 = (FullyConnected) nn.getOutputLayer().getConnections().iterator().next();
  Matrix cg2 = c2.getWeights();
  cg2.set(0.3f, 0, 0);
  cg2.set(0.9f, 0, 1);

  BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(nn, new SimpleInputProvider(new float[][] { { 0.35f, 0.9f } }, new float[][] { { 0.5f } }), new SimpleInputProvider(new float[][] { { 0.35f, 0.9f } }, new float[][] { { 0.5f } }), null, null, 1f, 0f, 0f, 0f, 0f, 1, 1, 1);
  bpt.train();

  assertEquals(0.09916, cg1.get(0, 0), 0.001);
  assertEquals(0.7978, cg1.get(0, 1), 0.001);
  assertEquals(0.3972, cg1.get(1, 0), 0.01);
  assertEquals(0.5928, cg1.get(1, 1), 0.01);
  assertEquals(0.272392, cg2.get(0, 0), 0.01);
  assertEquals(0.87305, cg2.get(0, 1), 0.01);
    }
View Full Code Here

      Tensor biasValue = TensorFactory.tensor(bias.getInputLayer(), bias, valuesProvider);
      if (biasValue.get(new int[biasValue.getDimensions().length]) == 0) {
    biasValue.forEach(i -> biasValue.getElements()[i] = 1);
      }

      Matrix weights = ((FullyConnected) bias).getWeights();
      Matrix output = TensorFactory.tensor(bias.getOutputLayer(), bias, valuesProvider);

      // for performance reasons no
      int rows = weights.getRows();
      int cols = output.getColumns();
      int weightsStartIndex = weights.getStartIndex();
      int outputStartIndex = output.getStartIndex();
      float[] wElements= weights.getElements();
      float[] oElements= output.getElements();

      for (int i = 0; i < rows; i++) {
    for (int j = 0; j < cols; j++) {
        oElements[outputStartIndex + i * cols + j] = wElements[weightsStartIndex + i];
    }
View Full Code Here

    public void testRBMLayerCalculator1() {
  Environment.getInstance().setUseWeightsSharedMemory(true);
  RBM rbm = NNFactory.rbm(2, 2, false);
  rbm.setLayerCalculator(NNFactory.lcSigmoid(rbm, null));

  Matrix cg1 = rbm.getMainConnections().getWeights();
  cg1.set(0.1f, 0, 0);
  cg1.set(0.8f, 0, 1);
  cg1.set(0.4f, 1, 0);
  cg1.set(0.6f, 1, 1);


  ValuesProvider vp = TensorFactory.tensorProvider(rbm, 1, true);
  Matrix visible = vp.get(rbm.getVisibleLayer());
  visible.set(0.35f, 0, 0);
  visible.set(0.9f, 1, 0);

  Set<Layer> calculated = new HashSet<Layer>();
  calculated.add(rbm.getVisibleLayer());
  rbm.getLayerCalculator().calculate(rbm, rbm.getHiddenLayer(), calculated, vp);

  Matrix hidden = vp.get(rbm.getHiddenLayer());
  assertEquals(0.68, hidden.get(0, 0), 0.01);
  assertEquals(0.6637, hidden.get(1, 0), 0.01);
    }
View Full Code Here

    protected final float l2weightDecay;

    public AparapiBackpropagationFullyConnected(List<Connections> inputConnections, ValuesProvider valuesProvider, ValuesProvider activations, List<Tensor> weightUpdates, Layer targetLayer, float learningRate, float momentum, float l1weightDecay, float l2weightDecay) {
  super(inputConnections, valuesProvider, targetLayer);

  Matrix m = TensorFactory.tensor(targetLayer, inputConnections, activations);
  this.ffActivation = m.getElements();
  this.activationStartPosition = m.getStartIndex();
  this.activationRowStep = m.getRowElementsDistance();
  this.activationColumnStep = m.getColumnElementsDistance();

  this.learningRate = momentum;
  this.momentum = momentum;
  this.l1weightDecay = l1weightDecay;
  this.l2weightDecay = l2weightDecay;
View Full Code Here

    public void testRBMLayerCalculator2() {
  Environment.getInstance().setUseWeightsSharedMemory(true);
  RBM rbm = NNFactory.rbm(2, 2, false);
  rbm.setLayerCalculator(NNFactory.lcSigmoid(rbm, null));
 
  Matrix cg1 = rbm.getMainConnections().getWeights();
  cg1.set(0.1f, 0, 0);
  cg1.set(0.8f, 1, 0);
  cg1.set(0.4f, 0, 1);
  cg1.set(0.6f, 1, 1);
 
  ValuesProvider vp = TensorFactory.tensorProvider(rbm, 1, true);
  Matrix hidden = vp.get(rbm.getHiddenLayer());
  hidden.set(0.35f, 0, 0);
  hidden.set(0.9f, 1, 0);
 
  Set<Layer> calculated = new HashSet<Layer>();
  calculated.add(rbm.getHiddenLayer());
  rbm.getLayerCalculator().calculate(rbm, rbm.getVisibleLayer(), calculated, vp);
 
  Matrix visible = vp.get(rbm.getVisibleLayer());
  assertEquals(0.68, visible.get(0, 0), 0.01);
  assertEquals(0.6637, visible.get(1, 0), 0.01);
    }
View Full Code Here

    public void testRBMLayerCalculator3() {
  Environment.getInstance().setUseWeightsSharedMemory(true);
  RBM rbm = NNFactory.rbm(3, 2, true);
  rbm.setLayerCalculator(NNFactory.lcSigmoid(rbm, null));

  Matrix cg1 = rbm.getMainConnections().getWeights();
  cg1.set(0.2f, 0, 0);
  cg1.set(0.4f, 0, 1);
  cg1.set(-0.5f, 0, 2);
  cg1.set(-0.3f, 1, 0);
  cg1.set(0.1f, 1, 1);
  cg1.set(0.2f, 1, 2);

  Matrix cgb1 = rbm.getHiddenBiasConnections().getWeights();
  cgb1.set(-0.4f, 0, 0);
  cgb1.set(0.2f, 1, 0);

  ValuesProvider vp = TensorFactory.tensorProvider(rbm, 1, true);
  Matrix visible = vp.get(rbm.getVisibleLayer());
  visible.set(1f, 0, 0);
  visible.set(0f, 1, 0);
  visible.set(1f, 2, 0);

  Set<Layer> calculated = new HashSet<Layer>();
  calculated.add(rbm.getVisibleLayer());
  rbm.getLayerCalculator().calculate(rbm, rbm.getHiddenLayer(), calculated, vp);

  Matrix hidden = vp.get(rbm.getHiddenLayer());
  assertEquals(0.332, hidden.get(0, 0), 0.001);
  assertEquals(0.525, hidden.get(1, 0), 0.001);
    }
View Full Code Here

    public void testRBMLayerCalculator4() {
  Environment.getInstance().setUseWeightsSharedMemory(true);
  RBM rbm = NNFactory.rbm(2, 3, true);
  rbm.setLayerCalculator(NNFactory.lcSigmoid(rbm, null));

  Matrix cg1 = rbm.getMainConnections().getWeights();
  cg1.set(0.2f, 0, 0);
  cg1.set(0.4f, 1, 0);
  cg1.set(-0.5f, 2, 0);
  cg1.set(-0.3f, 0, 1);
  cg1.set(0.1f, 1, 1);
  cg1.set(0.2f, 2, 1);

  Matrix cgb1 = rbm.getVisibleBiasConnections().getWeights();
  cgb1.set(-0.4f, 0, 0);
  cgb1.set(0.2f, 1, 0);

  ValuesProvider vp = TensorFactory.tensorProvider(rbm, 1, true);
  Matrix hidden = vp.get(rbm.getHiddenLayer());
  hidden.set(1f, 0, 0);
  hidden.set(0f, 1, 0);
  hidden.set(1f, 2, 0);

  Set<Layer> calculated = new HashSet<Layer>();
  calculated.add(rbm.getHiddenLayer());
  rbm.getLayerCalculator().calculate(rbm, rbm.getVisibleLayer(), calculated, vp);

  Matrix visible = vp.get(rbm.getVisibleLayer());
  assertEquals(0.332, visible.get(0, 0), 0.001);
  assertEquals(0.525, visible.get(1, 0), 0.001);
    }
View Full Code Here

  //Environment.getInstance().setExecutionMode(EXECUTION_MODE.SEQ);

  Environment.getInstance().setUseWeightsSharedMemory(true);
  RBM rbm = NNFactory.rbm(3, 2, true);

  Matrix cg1 = rbm.getMainConnections().getWeights();
  cg1.set(0.2f, 0, 0);
  cg1.set(0.4f, 0, 1);
  cg1.set(-0.5f, 0, 2);
  cg1.set(-0.3f, 1, 0);
  cg1.set(0.1f, 1, 1);
  cg1.set(0.2f, 1, 2);

  Matrix cgb1 = rbm.getVisibleBiasConnections().getWeights();
  cgb1.set(0f, 0, 0);
  cgb1.set(0f, 1, 0);
  cgb1.set(0f, 2, 0);

  Matrix cgb2 = rbm.getHiddenBiasConnections().getWeights();
  cgb2.set(-0.4f, 0, 0);
  cgb2.set(0.2f, 1, 0);

  AparapiCDTrainer t = TrainerFactory.cdSigmoidTrainer(rbm, new SimpleInputProvider(new float[][] { { 1, 0, 1 } }, null), null, null, null, 1f, 0f, 0f, 0f, 1, 1, 1, true);

  t.train();

  assertEquals(0.52276707, cgb1.get(0, 0), 0.00001);
  assertEquals(- 0.54617375, cgb1.get(1, 0), 0.00001);
  assertEquals(0.51522285, cgb1.get(2, 0), 0.00001);
 
  assertEquals(-0.4 - 0.08680013, cgb2.get(0, 0), 0.00001);
  assertEquals(0.2 - 0.02693379, cgb2.get(1, 0), 0.00001);

  assertEquals(0.2 + 0.13203661, cg1.get(0, 0), 0.00001);
  assertEquals(0.4 - 0.22863509,  cg1.get(0, 1), 0.00001);
  assertEquals(-0.5 + 0.12887852, cg1.get(0, 2), 0.00001);
  assertEquals(-0.3 + 0.26158813, cg1.get(1, 0), 0.00001);
View Full Code Here

TOP

Related Classes of com.github.neuralnetworks.tensor.Matrix

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by ORACLE Inc. Contact coftware#gmail.com.