Package org.encog.neural.networks

Examples of org.encog.neural.networks.BasicNetwork


    }
    if (hidden2 > 0) {
      pattern.addHiddenLayer(hidden2);
    }

    final BasicNetwork network = (BasicNetwork)pattern.generate();
    network.reset();
    return network;
  }
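The excerpt above begins mid-method, so the pattern setup is not shown. A minimal self-contained sketch of the same idea, assuming Encog's FeedForwardPattern and a hypothetical createNetwork signature with input/hidden1/hidden2/output counts (the original method's header is not part of this excerpt):

  // assumes org.encog.neural.pattern.FeedForwardPattern and
  // org.encog.engine.network.activation.ActivationTANH
  public static BasicNetwork createNetwork(final int input, final int hidden1,
      final int hidden2, final int output) {
    final FeedForwardPattern pattern = new FeedForwardPattern();
    pattern.setInputNeurons(input);
    pattern.setOutputNeurons(output);
    pattern.setActivationFunction(new ActivationTANH());

    // hidden layers are optional; add one only when a positive count is requested
    if (hidden1 > 0) {
      pattern.addHiddenLayer(hidden1);
    }
    if (hidden2 > 0) {
      pattern.addHiddenLayer(hidden2);
    }

    final BasicNetwork network = (BasicNetwork) pattern.generate();
    network.reset(); // randomize the initial weights
    return network;
  }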


   */
  @Override
  public void randomize(final MLMethod method) {

    if (method instanceof BasicNetwork) {
      final BasicNetwork network = (BasicNetwork) method;
      for (int i = 0; i < network.getLayerCount() - 1; i++) {
        randomize(network, i);
      }
    } else if (method instanceof MLEncodable) {
      final MLEncodable encode = (MLEncodable) method;
      final double[] encoded = new double[encode.encodedArrayLength()];

   
    if( !(method instanceof BasicNetwork) ) {
      throw new EncogError("Nguyen-Widrow only works on BasicNetwork.");
    }
   
    BasicNetwork network = (BasicNetwork)method;

    // start with a plain uniform randomization; the Nguyen-Widrow scaling is applied below
    new RangeRandomizer(getMin(), getMax()).randomize(network);

    int hiddenNeurons = 0;

    for(int i=1;i<network.getLayerCount()-1;i++)
    {
      hiddenNeurons+=network.getLayerNeuronCount(i);
    }

    // no hidden neurons, so Nguyen-Widrow does not apply;
    // keep the plain range randomization applied above
    if (hiddenNeurons < 1) {
      return;
    }

    this.inputCount = network.getInputCount();
    // Nguyen-Widrow scaling factor: beta = 0.7 * h^(1/n), where h is the number
    // of hidden neurons and n is the number of inputs
    this.beta = 0.7 * Math.pow(hiddenNeurons, 1.0 / network.getInputCount());

    super.randomize(network);
  }
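A usage sketch for the randomizer above, assuming Encog 3's NguyenWidrowRandomizer (depending on the Encog version the constructor is either no-argument or takes a (min, max) range):

  // assumes org.encog.mathutil.randomize.NguyenWidrowRandomizer
  BasicNetwork network = new BasicNetwork();
  network.addLayer(new BasicLayer(2));
  network.addLayer(new BasicLayer(3));
  network.addLayer(new BasicLayer(1));
  network.getStructure().finalizeStructure();
  network.reset();

  // re-initialize the weights with Nguyen-Widrow initialization
  new NguyenWidrowRandomizer().randomize(network);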

  public static double XOR_IDEAL[][] = { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } };

  public void trainAndSave() throws IOException {
    System.out.println("Training XOR network to under 1% error rate.");
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(2));
    network.addLayer(new BasicLayer(2));
    network.addLayer(new BasicLayer(1));
    network.getStructure().finalizeStructure();
    network.reset();

    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);

    // train the neural network
    final MLTrain train = new ResilientPropagation(network, trainingSet);

    do {
      train.iteration();
    } while (train.getError() > 0.009);

    double e = network.calculateError(trainingSet);
    System.out.println("Network traiined to error: " + e);

    System.out.println("Saving network");
    SerializeObject.save(new File(FILENAME), network);
  }
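XOR_INPUT is referenced above but declared elsewhere in the example. For the standard XOR truth table it pairs with XOR_IDEAL as follows (the ordering of the four rows is an assumption):

  public static double XOR_INPUT[][] = { { 0.0, 0.0 }, { 1.0, 0.0 }, { 0.0, 1.0 }, { 1.0, 1.0 } };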

  public final static String SQL_UID = "xoruser";
  public final static String SQL_PWD = "xorpassword";
 
  public static void main(final String args[]) {
   
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(2));
    network.addLayer(new BasicLayer(2));
    network.addLayer(new BasicLayer(1));
    network.getStructure().finalizeStructure();
    network.reset();

    MLDataSet trainingSet = new SQLNeuralDataSet(
        XORSQL.SQL,
        XORSQL.INPUT_SIZE,
        XORSQL.IDEAL_SIZE,
        XORSQL.SQL_DRIVER,
        XORSQL.SQL_URL,
        XORSQL.SQL_UID,
        XORSQL.SQL_PWD);
   
    // train the neural network
    final MLTrain train = new ResilientPropagation(network, trainingSet);
    // reset if improvement is less than 1% over 5 cycles
    train.addStrategy(new RequiredImprovementStrategy(5));
   
    int epoch = 1;

    do {
      train.iteration();
      System.out
          .println("Epoch #" + epoch + " Error:" + train.getError());
      epoch++;
    } while(train.getError() > 0.01);

    // test the neural network
    System.out.println("Neural Network Results:");
    for(MLDataPair pair: trainingSet ) {
      final MLData output = network.compute(pair.getInput());
      System.out.println(pair.getInput().getData(0) + "," + pair.getInput().getData(1)
          + ", actual=" + output.getData(0) + ",ideal=" + pair.getIdeal().getData(0));
    }
  }
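The remaining XORSQL constants are not part of this excerpt. A purely hypothetical set, assuming an in-memory HSQLDB database holding an XOR table with two input columns and one ideal column (substitute the query, driver and URL for your own database):

  // hypothetical values for illustration only
  public static final String SQL = "SELECT COL1, COL2, EXPECTED FROM XOR ORDER BY ID";
  public static final int INPUT_SIZE = 2;
  public static final int IDEAL_SIZE = 1;
  public static final String SQL_DRIVER = "org.hsqldb.jdbcDriver";
  public static final String SQL_URL = "jdbc:hsqldb:mem:xor";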

    SerializeObject.save(new File(FILENAME), network);
  }

  public void loadAndEvaluate() throws IOException, ClassNotFoundException {
    System.out.println("Loading network");
    BasicNetwork network = (BasicNetwork) SerializeObject.load(new File(FILENAME));
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
    double e = network.calculateError(trainingSet);
    System.out
        .println("Loaded network's error is(should be same as above): "
            + e);
  }

  public static final int HIDDEN_COUNT = 60;
  public static final int OUTPUT_COUNT = 20;
 
  public static BasicNetwork generateNetwork()
  {
    final BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(MultiBench.INPUT_COUNT));
    network.addLayer(new BasicLayer(MultiBench.HIDDEN_COUNT));
    network.addLayer(new BasicLayer(MultiBench.OUTPUT_COUNT));
    network.getStructure().finalizeStructure();
    network.reset();
    return network;
  }

    return diff;
  }
 
  public static void main(String args[])
  {
    BasicNetwork network = generateNetwork();
    MLDataSet data = generateTraining();
   
    double rprop = evaluateRPROP(network,data);
    double mprop = evaluateMPROP(network,data);
    double factor = rprop/mprop;

    int outputNeurons = DIGITS.length;
   
    ADALINEPattern pattern = new ADALINEPattern();
    pattern.setInputNeurons(inputNeurons);
    pattern.setOutputNeurons(outputNeurons);
    BasicNetwork network = (BasicNetwork)pattern.generate();
   
    // train it
    MLDataSet training = generateTraining();
    MLTrain train = new TrainAdaline(network,training,0.01);
   
    int epoch = 1;
    do {
      train.iteration();
      System.out
          .println("Epoch #" + epoch + " Error:" + train.getError());
      epoch++;
    } while(train.getError() > 0.01);
   
    // display the final training error over the whole training set
    System.out.println("Error:" + network.calculateError(training));
   
    // test it
    for(int i=0;i<DIGITS.length;i++)
    {
      int output = network.winner(image2data(DIGITS[i]));
     
      for(int j=0;j<CHAR_HEIGHT;j++)
      {
        if( j==CHAR_HEIGHT-1 )
          System.out.println(DIGITS[i][j]+" -> "+output);
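The image2data helper called above is not shown. A hypothetical sketch, assuming each digit is a String[] of CHAR_HEIGHT rows in which 'O' marks a lit pixel and CHAR_WIDTH gives the row length, producing the bipolar input an ADALINE expects:

  // assumes org.encog.ml.data.MLData and org.encog.ml.data.basic.BasicMLData
  public static MLData image2data(String[] image) {
    MLData result = new BasicMLData(CHAR_WIDTH * CHAR_HEIGHT);
    for (int row = 0; row < CHAR_HEIGHT; row++) {
      for (int col = 0; col < CHAR_WIDTH; col++) {
        int index = (row * CHAR_WIDTH) + col;
        // lit pixels become +1, blanks become -1 (bipolar encoding)
        result.setData(index, image[row].charAt(col) == 'O' ? 1 : -1);
      }
    }
    return result;
  }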

  public static final int OUTPUT_COUNT = 1;
  public static final int HIDDEN_COUNT = 20;
  public static final int ITERATIONS = 10;

  public static long BenchmarkEncog(double[][] input, double[][] output) {
    BasicNetwork network = new BasicNetwork();
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true,
        input[0].length));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), true,
        HIDDEN_COUNT));
    network.addLayer(new BasicLayer(new ActivationSigmoid(), false,
        output[0].length));
    network.getStructure().finalizeStructure();
    network.reset();

    MLDataSet trainingSet = new BasicMLDataSet(input, output);

    // train the neural network
    MLTrain train = new Backpropagation(network, trainingSet, 0.7, 0.7);
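The excerpt stops right after the trainer is created, but the method's long return value suggests it times the training. A minimal sketch of how the benchmark might finish, assuming a fixed number of ITERATIONS and millisecond timing:

    // hypothetical continuation of BenchmarkEncog()
    final long start = System.currentTimeMillis();
    for (int i = 0; i < ITERATIONS; i++) {
      train.iteration(); // one backpropagation epoch over the training set
    }
    final long stop = System.currentTimeMillis();
    return stop - start;
  }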


