Package: org.encog.neural.networks

Code examples for org.encog.neural.networks.BasicNetwork


    return network;
  }
 
  // Entry point: builds the network, then selects a training algorithm
  // based on the first command-line argument.
  // NOTE(review): fragment is truncated below — the trainer branches are
  // not visible here.
  public static void main(String args[])
  {
    BasicNetwork network = createNetwork();
   
    MLTrain train;
   
    // "anneal" selects simulated annealing; other values presumably fall
    // through to a different trainer — TODO confirm against the full source.
    if( args.length>0 && args[0].equalsIgnoreCase("anneal"))
    {
View Full Code Here


    // Graph<V, E> where V is the type of the vertices
    // and E is the type of the edges
    Graph<DrawnNeuron, DrawnConnection> g = null;
   
    // Dispatch on the concrete method type: only BasicNetwork and
    // NEATNetwork can be drawn; any other type leaves g == null.
    if( method instanceof BasicNetwork ) {
      BasicNetwork network = (BasicNetwork)method;
      // A BasicNetwork is rendered from its flattened (FlatNetwork) form.
      g = buildGraph(network.getStructure().getFlat());
    } else if( method instanceof NEATNetwork ) {
      NEATNetwork neat = (NEATNetwork)method;
      g = buildGraph(neat);
    }
   
View Full Code Here

      report.header("Context Target Size");
      report.header("Context Target Offset");
      report.header("Context Count");
      report.endRow();

      BasicNetwork network = (BasicNetwork) method;
      FlatNetwork flat = network.getStructure().getFlat();
      int layerCount = network.getLayerCount();

      // Emit one table row per layer. NOTE(review): layer 0 is labelled
      // "(Output)" and the last layer "(Input)" — Encog's FlatNetwork
      // stores layers output-first, so this ordering is presumably
      // intentional; confirm against the FlatNetwork documentation.
      for (int l = 0; l < layerCount; l++) {
        report.beginRow();
        StringBuilder str = new StringBuilder();
        str.append(Format.formatInteger(l + 1));
        if (l == 0) {
          str.append(" (Output)");
        } else if (l == network.getLayerCount() - 1) {
          str.append(" (Input)");
        }
        report.cell(str.toString());
        report.cell(Format.formatInteger(flat.getLayerCounts()[l]));
        report.cell(Format.formatInteger(flat.getLayerFeedCounts()[l]));
View Full Code Here

  }

  // Opens the CreateFeedforward dialog pre-populated from the current
  // network, then either regenerates the network from scratch or prunes
  // the existing one, depending on what the user changed.
  // NOTE(review): fragment is truncated — the prune path and the use of
  // the regenerated network are not visible here.
  private void restructureFeedforward() {
    CreateFeedforward dialog = new CreateFeedforward(EncogWorkBench
        .getInstance().getMainWindow());
    BasicNetwork network = (BasicNetwork)method;

    // Pre-fill the dialog with the network's current shape and activations.
    ActivationFunction oldActivationOutput = network.getActivation(network
        .getLayerCount() - 1);
    dialog.setActivationFunctionOutput(oldActivationOutput);
    dialog.getInputCount().setValue(network.getInputCount());
    dialog.getOutputCount().setValue(network.getOutputCount());
    int hiddenLayerCount = network.getLayerCount() - 2;

    // NOTE(review): the "old" hidden activation is a freshly constructed
    // ActivationTANH, not one taken from the network, so the identity
    // comparison below can never match a dialog selection — verify intended.
    ActivationFunction oldActivationHidden = new ActivationTANH();
    for (int i = 0; i < hiddenLayerCount; i++) {
      int num = network.getLayerNeuronCount(i + 1);
      // These entries are re-parsed below by string index, so the exact
      // "Hidden Layer N: M neurons" format matters.
      String str = "Hidden Layer " + (i + 1) + ": " + num + " neurons";
      dialog.getHidden().getModel().addElement(str);
    }
    dialog.setActivationFunctionHidden(oldActivationHidden);

    if (dialog.process()) {
      // decide if entire network is to be recreated
      // NOTE(review): != compares object identity, not equivalence of the
      // activation functions — any new function instance forces a rebuild.
      if ((dialog.getActivationFunctionHidden() != oldActivationHidden)
          || (dialog.getActivationFunctionOutput() != oldActivationOutput)
          || dialog.getHidden().getModel().size() != (network
              .getLayerCount() - 2)) {
        FeedForwardPattern feedforward = new FeedForwardPattern();
        feedforward.setActivationFunction(dialog
            .getActivationFunctionHidden());
        feedforward.setInputNeurons(dialog.getInputCount().getValue());
        for (int i = 0; i < dialog.getHidden().getModel().size(); i++) {
          String str = (String) dialog.getHidden().getModel()
              .getElementAt(i);
          // Recover the neuron count from "Hidden Layer N: M neurons".
          int i1 = str.indexOf(':');
          int i2 = str.indexOf("neur");
          if (i1 != -1 && i2 != -1) {
            str = str.substring(i1 + 1, i2).trim();
            int neuronCount = Integer.parseInt(str);
            feedforward.addHiddenLayer(neuronCount);
          }
        }
        // NOTE(review): setInputNeurons was already called above; this
        // repeated call is harmless but redundant.
        feedforward.setInputNeurons(dialog.getInputCount().getValue());
        feedforward.setOutputNeurons(dialog.getOutputCount().getValue());
        // obj's use lies beyond the visible fragment.
        BasicNetwork obj = (BasicNetwork) feedforward.generate();
      } else {
        // try to prune it
        PruneSelective prune = new PruneSelective(network);
        int newInputCount = dialog.getInputCount().getValue();
        int newOutputCount = dialog.getOutputCount().getValue();
View Full Code Here

  // Expected XOR outputs, paired row-for-row with CustomActivation.XOR_INPUT.
  public static double XOR_IDEAL[][] = { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } };

  // Builds a 2-4-1 XOR network using a custom activation function
  // (ActivationSigmoidPosNeg) on the hidden and output layers.
  // NOTE(review): truncated — training and evaluation are not visible here.
  public static void main(final String args[]) {

    final BasicNetwork network = new BasicNetwork();
    // Input layer: no activation function, bias enabled.
    network.addLayer(new BasicLayer(null, true, 2));
    network.addLayer(new BasicLayer(new ActivationSigmoidPosNeg(), true, 4));
    network.addLayer(new BasicLayer(new ActivationSigmoidPosNeg(), true, 1));
    network.getStructure().finalizeStructure();
    network.reset();

    final MLDataSet trainingSet = new BasicMLDataSet(
        CustomActivation.XOR_INPUT, CustomActivation.XOR_IDEAL);

   
View Full Code Here

    // NOTE(review): fragment starts mid-method — the signature and any
    // validation of 'input' are above the visible region.
    if( output<=0 ) {
      throw new EncogError("Must have at least one output for feedforward.");
    }
   
   
    final BasicNetwork result = new BasicNetwork();
    final List<String> layers = ArchitectureParse.parseLayers(architecture);
    // The activation carries forward: each activation token in the
    // architecture string applies to all subsequent layers until replaced.
    ActivationFunction af = new ActivationLinear();

    // questionPhase tracks '?' placeholders: the first defaults to the
    // input count, later ones to the output count; more than two is an error.
    int questionPhase = 0;
    for (final String layerStr : layers) {
      int defaultCount;
      // determine default
      if (questionPhase == 0) {
        defaultCount = input;
      } else {
        defaultCount = output;
      }

      final ArchitectureLayer layer = ArchitectureParse.parseLayer(
          layerStr, defaultCount);
      final boolean bias = layer.isBias();

      String part = layer.getName();
      if (part != null) {
        part = part.trim();
      } else {
        part = "";
      }
     
      // A token naming a known activation function switches 'af';
      // otherwise the token is treated as a layer specification.
      ActivationFunction lookup = this.factory.create(part);
     
      if (lookup!=null) {
        af = lookup;
      } else {
        if (layer.isUsedDefault()) {
          questionPhase++;
          if (questionPhase > 2) {
            throw new EncogError("Only two ?'s may be used.");
          }
        }

        if (layer.getCount() == 0) {
          throw new EncogError("Unknown architecture element: "
              + architecture + ", can't parse: " + part);
        }

        result.addLayer(new BasicLayer(af, bias,
            layer.getCount()));

      }
    }

    result.getStructure().finalizeStructure();
    result.reset();

    return result;
  }
View Full Code Here

  // Verifies RPROP training with k-fold cross-validation on a noisy XOR
  // data set. NOTE(review): truncated — the training loop and assertions
  // are below the visible fragment.
  @Test
  public void testRPROP() throws Throwable
  {
    MLDataSet trainingData = XOR.createNoisyXORDataSet(10);
   
    BasicNetwork network = NetworkUtil.createXORNetworkUntrained();
   
    // FoldedDataSet partitions the data so CrossValidationKFold can rotate
    // through 4 train/validate folds around the RPROP trainer.
    final FoldedDataSet folded = new FoldedDataSet(trainingData);
    final MLTrain train = new ResilientPropagation(network, folded);
    final CrossValidationKFold trainFolded = new CrossValidationKFold(train,4);
   
View Full Code Here

  // Loads XOR training data from CSV (2 input columns, 1 ideal column,
  // no header row) and trains a 2-4-1 feedforward network to 1% error.
  // NOTE(review): truncated below — evaluation output is not visible here.
  public static void main(final String args[]) {

    final MLDataSet trainingSet = TrainingSetUtil.loadCSVTOMemory(
        CSVFormat.ENGLISH, "c:\\temp\\xor.csv", false, 2, 1);
    final BasicNetwork network = EncogUtility.simpleFeedForward(2, 4, 0, 1,
        true);

    System.out.println();
    System.out.println("Training Network");
    EncogUtility.trainToError(network, trainingSet, 0.01);
View Full Code Here

  // XOR example that registers the OpenCL plugin before training.
  // NOTE(review): truncated — the training loop is below the visible span.
  public static void main(final String args[]) {
   
    // Register the OpenCL plugin before any trainer is created.
    Encog.getInstance().registerPlugin(new EncogOpenCLPlugin());
   
    // create a neural network, without using a factory
    BasicNetwork network = new BasicNetwork();
    // Input layer: no activation, no bias.
    network.addLayer(new BasicLayer(null,false,2));
    network.addLayer(new BasicLayer(new ActivationSigmoid(),true,3));
    network.addLayer(new BasicLayer(new ActivationSigmoid(),true,1));
    network.getStructure().finalizeStructure();
    network.reset();
    // Deterministic initial weights so runs are reproducible.
    new ConsistentRandomizer(-1,1).randomize(network);

    // create training data
    MLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
    final MLTrain train = new ResilientPropagation(network, trainingSet);
View Full Code Here

    // Wrapped Encog network and its training data.
    private BasicNetwork neuralNetwork=null;
    private MLDataSet trainingSet=null;
   
    // Presumably a persistence path for the network — TODO confirm
    // against the full class.
    private String filePath=null;
    // Builds an input -> hidden -> output sigmoid network; the hidden size
    // comes from ProgramConfig.NEURAL_HIDDEN_LAYER_LENGTH.
    // NOTE(review): constructor is truncated below the visible fragment.
    public NeuralNetworkWrapper(int inputNeurons,int outputNeurons){
        neuralNetwork=new BasicNetwork();
        neuralNetwork.addLayer(new BasicLayer(null,true,inputNeurons));
  neuralNetwork.addLayer(new BasicLayer(new ActivationSigmoid(),true,ProgramConfig.NEURAL_HIDDEN_LAYER_LENGTH));
  neuralNetwork.addLayer(new BasicLayer(new ActivationSigmoid(),false,outputNeurons));
  neuralNetwork.getStructure().finalizeStructure();
  neuralNetwork.reset();
View Full Code Here

TOP

Related Classes of org.encog.neural.networks.BasicNetwork

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle, Inc. Contact coftware#gmail.com.