Package org.encog.util

Examples of org.encog.util.ParamsHolder


    } else {
      throw new NeuralNetworkError("Unknown model: "
          + pnnLayer.getName());
    }

    final ParamsHolder holder = new ParamsHolder(pnnLayer.getParams());

    final String kernelStr = holder.getString("KERNEL", false, "gaussian");
   
    if (kernelStr.equalsIgnoreCase("gaussian")) {
      kernel = PNNKernelType.Gaussian;
    } else if (kernelStr.equalsIgnoreCase("reciprocal")) {
      kernel = PNNKernelType.Reciprocal;
View Full Code Here


   */
  public final MLTrain create(final MLMethod method,
      final MLDataSet training, final String argsStr) {

    final Map<String, String> args = ArchitectureParse.parseParams(argsStr);
    final ParamsHolder holder = new ParamsHolder(args);

    final double learningRate = holder.getDouble(
        MLTrainFactory.PROPERTY_LEARNING_RATE, false, 0.1);

    return new ManhattanPropagation((BasicNetwork) method, training,
        learningRate);
  }
View Full Code Here

   */
  public MLTrain create(final MLMethod method,
      final MLDataSet training, final String argsStr) {

    final Map<String, String> args = ArchitectureParse.parseParams(argsStr);
    final ParamsHolder holder = new ParamsHolder(args);

    final int particles = holder.getInt(
        MLTrainFactory.PROPERTY_PARTICLES, false, 20);
   
    CalculateScore score = new TrainingSetScore(training);
    Randomizer randomizer = new NguyenWidrowRandomizer();
   
View Full Code Here

    } else {
      throw new NeuralNetworkError("Unknown model: "
          + pnnLayer.getName());
    }

    final ParamsHolder holder = new ParamsHolder(pnnLayer.getParams());

    final String kernelStr = holder.getString("KERNEL", false, "gaussian");
   
    if (kernelStr.equalsIgnoreCase("gaussian")) {
      kernel = PNNKernelType.Gaussian;
    } else if (kernelStr.equalsIgnoreCase("reciprocal")) {
      kernel = PNNKernelType.Reciprocal;
View Full Code Here

    }

    final CalculateScore score = new TrainingSetScore(training);

    final Map<String, String> args = ArchitectureParse.parseParams(argsStr);
    final ParamsHolder holder = new ParamsHolder(args);
    final double startTemp = holder.getDouble(
        MLTrainFactory.PROPERTY_TEMPERATURE_START, false, 10);
    final double stopTemp = holder.getDouble(
        MLTrainFactory.PROPERTY_TEMPERATURE_STOP, false, 2);

    final int cycles = holder.getInt(MLTrainFactory.CYCLES, false, 100);

    final MLTrain train = new NeuralSimulatedAnnealing(
        (BasicNetwork) method, score, startTemp, stopTemp, cycles);

    return train;
View Full Code Here

          "RPROP training cannot be used on a method of type: "
              + method.getClass().getName());
    }

    final Map<String, String> args = ArchitectureParse.parseParams(argsStr);
    final ParamsHolder holder = new ParamsHolder(args);
    final double initialUpdate = holder.getDouble(
        MLTrainFactory.PROPERTY_INITIAL_UPDATE, false,
        RPROPConst.DEFAULT_INITIAL_UPDATE);
    final double maxStep = holder.getDouble(
        MLTrainFactory.PROPERTY_MAX_STEP, false,
        RPROPConst.DEFAULT_MAX_STEP);

    return new ResilientPropagation((ContainsFlat) method, training,
        initialUpdate, maxStep);
View Full Code Here

    final double defaultGamma = 1.0 / ((SVM) method).getInputCount();
    final double defaultC = 1.0;

    final Map<String, String> args = ArchitectureParse.parseParams(argsStr);
    final ParamsHolder holder = new ParamsHolder(args);
    final double gamma = holder.getDouble(MLTrainFactory.PROPERTY_GAMMA,
        false, defaultGamma);
    final double c = holder.getDouble(MLTrainFactory.PROPERTY_C, false,
        defaultC);

    final SVMTrain result = new SVMTrain((SVM) method, training);
    result.setGamma(gamma);
    result.setC(c);
View Full Code Here

      throw new EncogError("Must have at least one output for EPL.");
    }
   
   
    final Map<String, String> args = ArchitectureParse.parseParams(architecture);
    final ParamsHolder holder = new ParamsHolder(args);
   
    final int populationSize = holder.getInt(
        MLMethodFactory.PROPERTY_POPULATION_SIZE, false, 1000);
    String variables = holder.getString("vars", false, "x");
    String funct = holder.getString("funct", false, null);
   
    EncogProgramContext context = new EncogProgramContext();
    StringTokenizer tok = new StringTokenizer(variables,",");
    while(tok.hasMoreElements()) {
      context.defineVariable(tok.nextToken());
View Full Code Here

    }

    final CalculateScore score = new TrainingSetScore(training);

    final Map<String, String> args = ArchitectureParse.parseParams(argsStr);
    final ParamsHolder holder = new ParamsHolder(args);
    final int populationSize = holder.getInt(
        MLTrainFactory.PROPERTY_POPULATION_SIZE, false, 5000);
   
    MLTrain train = new MLMethodGeneticAlgorithm(new MethodFactory(){
      @Override
      public MLMethod factor() {
View Full Code Here

      throw new EncogError("Must have at least one output for NEAT.");
    }
   
   
    final Map<String, String> args = ArchitectureParse.parseParams(architecture);
    final ParamsHolder holder = new ParamsHolder(args);
   
    final int populationSize = holder.getInt(
        MLMethodFactory.PROPERTY_POPULATION_SIZE, false, 1000);
   
    final int cycles = holder.getInt(
        MLMethodFactory.PROPERTY_CYCLES, false, NEATPopulation.DEFAULT_CYCLES);
   
    ActivationFunction af = this.factory.create(
        holder.getString(MLMethodFactory.PROPERTY_AF, false, MLActivationFactory.AF_SSIGMOID));

    NEATPopulation pop = new NEATPopulation(input,output,populationSize);
    pop.reset();
    pop.setActivationCycles(cycles);
    pop.setNEATActivationFunction(af);
View Full Code Here

TOP

Related Classes of org.encog.util.ParamsHolder

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.