Examples of TransferFunction


Examples of org.apache.flex.forks.batik.ext.awt.image.TransferFunction

        SampleModel sm = srcRI.getSampleModel();
        int bands = sm.getNumBands();

        // System.out.println("Slope, Intercept: " + slope + ", " + intercept);
        TransferFunction [] tfs = new TransferFunction[bands];
        TransferFunction    tf  = new LinearTransfer(slope, intercept);
        for (int i=0; i<tfs.length; i++)
            tfs[i] = tf;

        return new ComponentTransferRed(convertSourceCS(srcRI), tfs, null);
    }
View Full Code Here

Examples of org.apache.flex.forks.batik.ext.awt.image.TransferFunction

     * Converts a ComponentTransferFunction to a TransferFunction
     */
    private static TransferFunction getTransferFunction
        (ComponentTransferFunction function){

        TransferFunction txfFunc = null;
        if(function == null){
            txfFunc = new IdentityTransfer();
        }
        else{
            switch(function.getType()){
View Full Code Here

Examples of org.jkff.ire.fa.TransferFunction

        public TransferFunctions<ST> composeAll(final Sequence<TransferFunctions<ST>> tfs) {
            if(tfs.length() == 0) {
                return UNIT;
            }

            TransferFunction sumForward = forwardReducer.composeAll(new Sequence<TransferFunction<ST>>() {
                public int length() {
                    return tfs.length();
                }

                public TransferFunction<ST> get(int i) {
                    return tfs.get(i).forward;
                }
            });

            TransferFunction sumBackward = backwardReducer.composeAll(new Sequence<TransferFunction<ST>>() {
                public int length() {
                    return tfs.length();
                }

                public TransferFunction<ST> get(int i) {
View Full Code Here

Examples of org.neuroph.core.transfer.TransferFunction

                    inputFunction = new InputFunction(weightsFunction, summingFunction);                   
                }



    TransferFunction transferFunction = createTransferFunction(neuronProperties.getTransferFunctionProperties());

    Neuron neuron = null;
    Class neuronClass = neuronProperties.getNeuronType();

                // use two param constructor to create neuron
View Full Code Here

Examples of org.neuroph.core.transfer.TransferFunction

   * @param tfProperties
   *            transfer function properties
   * @return returns transfer function
   */
  private static TransferFunction createTransferFunction(Properties tfProperties) {
    TransferFunction transferFunction = null;

    Class  tfClass = (Class)tfProperties.getProperty("transferFunction");

                    try {
                        Class[] paramTypes = null;
View Full Code Here

Examples of org.neuroph.core.transfer.TransferFunction

        @Override
  protected void adjustOutputNeurons(double[] patternError) {
           
                // get output layer
                MatrixMlpLayer outputLayer = (MatrixMlpLayer)matrixLayers[matrixLayers.length - 1];
                TransferFunction transferFunction = outputLayer.getTransferFunction();

                // get output vector
                double[] outputs = outputLayer.getOutputs();
                double[] netInputs = outputLayer.getNetInput();
                double[] neuronErrors = outputLayer.getErrors(); // these will hold  -should be set from here!!!!

                // calculate errors(deltas) for all output neurons
                for(int i = 0; i < outputs.length; i++) {
                    neuronErrors[i] = patternError[i] * transferFunction.getDerivative(netInputs[i]); // ovde mi treba weighted sum, da ne bi morao ponovo da racunam
                }

                // update weights
                this.updateLayerWeights(outputLayer, neuronErrors);
        }
View Full Code Here

Examples of org.neuroph.core.transfer.TransferFunction

            int layersCount = matrixMlp.getLayersCount();
           
            for ( int layerIdx = layersCount -2 ; layerIdx > 0 ; layerIdx--) {
               MatrixMlpLayer currentLayer = (MatrixMlpLayer)matrixLayers[layerIdx];

               TransferFunction transferFunction = currentLayer.getTransferFunction();
               int neuronsCount = currentLayer.getNeuronsCount();

               double[] neuronErrors = currentLayer.getErrors();
               double[] netInputs = currentLayer.getNetInput();

               MatrixMlpLayer nextLayer = (MatrixMlpLayer)currentLayer.getNextLayer();
               double[] nextLayerErrors = nextLayer.getErrors();
               double[][] nextLayerWeights = nextLayer.getWeights();

               // calculate error for each neuron in current layer
               for(int neuronIdx = 0; neuronIdx < neuronsCount; neuronIdx++) {
                   // calculate weighted sum of errors of all neuron it is attached to - calculate how much this neuron is contributing to errors in next layer
                   double weightedErrorsSum = 0;

                   for(int nextLayerNeuronIdx = 0; nextLayerNeuronIdx < nextLayer.getNeuronsCount(); nextLayerNeuronIdx++) {
                     weightedErrorsSum += nextLayerErrors[nextLayerNeuronIdx] * nextLayerWeights[nextLayerNeuronIdx][neuronIdx];
                   }

                   // calculate the error for this neuron
                   neuronErrors[neuronIdx] = transferFunction.getDerivative(netInputs[neuronIdx]) * weightedErrorsSum;
               } // neuron iterator

               this.updateLayerWeights(currentLayer, neuronErrors);
              
            } // layer iterator
View Full Code Here

Examples of org.neuroph.core.transfer.TransferFunction

      double d = connection.getToNeuron().getError()
          * connection.getWeight().getValue();
      delta_sum += d; // weighted sum from the next layer
    } // for

    TransferFunction transferFunction = neuron.getTransferFunction();
    double netInput = neuron.getNetInput();
    double f1 = transferFunction.getDerivative(netInput);
    double delta = f1 * delta_sum;
    return delta;
  }
View Full Code Here

Examples of org.neuroph.core.transfer.TransferFunction

        neuron.setError(0);
                                i++;
        continue;
      }
     
      TransferFunction transferFunction = neuron.getTransferFunction();
      double neuronInput = neuron.getNetInput();
      double delta = outputError * transferFunction.getDerivative(neuronInput);
      neuron.setError(delta);
      this.updateNeuronWeights(neuron);       
      i++;
    } // for       
  }
View Full Code Here

Examples of org.neuroph.core.transfer.TransferFunction

  private static FlatLayer flattenLayer(Layer layer) {
    boolean inputLayer = false;
    Set<Class<?>> transferFunctions = new HashSet<Class<?>>();
    int neuronCount = 0;
    int biasCount = 0;
    TransferFunction transfer = null;

    for (Neuron neuron : layer.getNeurons()) {
      if (neuron.getClass() == InputNeuron.class)
        inputLayer = true;

      if (neuron.getClass() == Neuron.class
          || neuron.getClass() == InputNeuron.class) {
        neuronCount++;

        transfer = neuron.getTransferFunction();
        transferFunctions.add(transfer.getClass());
      } else if (neuron.getClass() == BiasNeuron.class)
        biasCount++;
    }

    if (transferFunctions.size() > 1)
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., now owned by Oracle Inc. Contact coftware#gmail.com.