Examples of Svm


Examples of com.nr.ci.Svm
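The excerpt below is truncated: it picks up at the tail of a data-generation loop, and the declarations it relies on (M, data, y, x, yy, lambda, omega, the loop counters) are not shown. A minimal setup sketch under those assumptions follows; the sizes, the random seed, and the data-generation rule here are illustrative placeholders only, with com.nr.* imports assumed.

    int i,j,k,M=1000;
    double a,b,yy,test,lambda,omega=1.3;       // omega: over-relaxation parameter (guess)
    double[] y = new double[M];                // class labels, +1 or -1
    double[] x = new double[2];                // scratch vector for new test points
    double[][] data = new double[M][2];        // two-dimensional feature vectors
    Ran ran = new Ran(17);                     // NR uniform RNG (assumed com.nr.ran.Ran); seed is arbitrary
    for (i=0;i<M;i++) {
      y[i] = (i < M/2 ? 1.0 : -1.0);
      a = ran.doub();
      b = ran.doub();
      // Placeholder rule: put the two classes on opposite sides of the origin.
      data[i][0] = (y[i] > 0.0 ? 1.0+a : -1.0-a);
      data[i][1] = (y[i] > 0.0 ? 1.0+(a+b) : -1.0-(a+b));
    }

The original excerpt resumes inside such a loop: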

      data[i][1]=-1.0-(a+b);
    }
   
    // Linear kernel
    Svmlinkernel linkernel=new Svmlinkernel(data,y);
    Svm linsvm=new Svm(linkernel);
    lambda=10;
    k=0;
    do {
      test=linsvm.relax(lambda,omega);
//      System.out.printf("%f\n",test);
      k++;
    } while (test > 1.e-3 && k < 100);
    int nerror=0;
    for (i=0;i<M;i++) {
//      if (i%10 == 0) System.out.printf("%b %b\n", y[i]==1.0, linsvm.predict(i) >= 1.0);
//      if ((y[i] == 1.0) != (linsvm.predict(i) >= 1.0))
//        System.out.printf("%f %f %f %f\n", data[i][0], data[i][1], y[i], linsvm.predict(i));
      nerror += ((y[i]==1.0) != (linsvm.predict(i) >= 0.0) ? 1 : 0);
    }
    System.out.printf("Errors: %d\n", nerror);

    // Polynomial kernel
    Svmpolykernel polykernel=new Svmpolykernel(data,y,1.0,1.0,2.0);
    Svm polysvm=new Svm(polykernel);
    lambda=10;
    k=0;
    do {
      test=polysvm.relax(lambda,omega);
//      System.out.printf("%f\n",test);
      k++;
    } while (test > 1.e-3 && k < 100);
    nerror=0;
    for (i=0;i<M;i++) {
      nerror += ((y[i]==1.0) != (polysvm.predict(i) >= 0.0) ? 1 : 0);
    }
    System.out.printf("Errors: %d\n", nerror);

    // Gaussian kernel
    Svmgausskernel gausskernel=new Svmgausskernel(data,y,1.0);
    Svm gausssvm=new Svm(gausskernel);
    lambda=10;
    k=0;
    do {
      test=gausssvm.relax(lambda,omega);
//      System.out.printf("%f\n",test);
      k++;
    } while (test > 1.e-3 && k < 100);
    nerror=0;
    for (i=0;i<M;i++) {
      nerror += ((y[i]==1.0) != (gausssvm.predict(i) >= 0.0) ? 1 : 0);
    }
    System.out.printf("Errors: %d\n", nerror);

   
    // Need to add tests for a harder test case and resolve the issue that the two
    // support vectors give an erroneous indication for two of the kernels above

    // Example similar to the book
    Normaldev ndev=new Normaldev(0.0,0.5,17);
    for (j=0;j<4;j++) {   // Four quadrants
      for (i=0;i<M/4;i++) {
        k=(M/4)*j+i;
        if (j == 0) {
          y[k]=1.0;
          data[k][0]=1.0+ndev.dev();
          data[k][1]=1.0+ndev.dev();
        } else if (j == 1) {
          y[k]=-1.0;
          data[k][0]=-1.0+ndev.dev();
          data[k][1]=1.0+ndev.dev();
        } else if (j == 2) {
          y[k]=1.0;
          data[k][0]=-1.0+ndev.dev();
          data[k][1]=-1.0+ndev.dev();
        } else {
          y[k]=-1.0;
          data[k][0]=1.0+ndev.dev();
          data[k][1]=-1.0+ndev.dev();
        }
      }
    }
       
    // Linear kernel
    Svmlinkernel linkernel2=new Svmlinkernel(data,y);
    Svm linsvm2=new Svm(linkernel2);
    System.out.printf("Errors: ");
    for (lambda=0.001;lambda<10000;lambda *= 10) {
      k=0;
      do {
        test=linsvm2.relax(lambda,omega);
//        System.out.printf("%f\n",test);
        k++;
      } while (test > 1.e-3 && k < 100);
      nerror=0;
      for (i=0;i<M;i++) {
        nerror += ((y[i]==1.0) != (linsvm2.predict(i) >= 0.0) ? 1 : 0);
      }
      System.out.printf("%d ",nerror);
      // Test new data
      nerror=0;
      for (j=0;j<4;j++) {   // Four quadrants
        for (i=0;i<M/4;i++) {
          if (j == 0) {
            yy=1.0;
            x[0]=1.0+ndev.dev();
            x[1]=1.0+ndev.dev();
          } else if (j == 1) {
            yy=-1.0;
            x[0]=-1.0+ndev.dev();
            x[1]=1.0+ndev.dev();
          } else if (j == 2) {
            yy=1.0;
            x[0]=-1.0+ndev.dev();
            x[1]=-1.0+ndev.dev();
          } else {
            yy=-1.0;
            x[0]=1.0+ndev.dev();
            x[1]=-1.0+ndev.dev();
          }
          nerror += ((yy==1.0) != (linsvm2.predict(x) >= 0.0) ? 1 : 0);
        }
      }
      System.out.printf("%d    ",nerror);
    }
    System.out.println();

    // Polynomial kernel
    Svmpolykernel polykernel2 = new Svmpolykernel(data,y,1.0,1.0,4.0);
    Svm polysvm2=new Svm(polykernel2);
    System.out.printf("Errors: ");
    for (lambda=0.001;lambda<10000;lambda *= 10) {
      k=0;
      do {
        test=polysvm2.relax(lambda,omega);
//        System.out.printf("%f\n",test);
        k++;
      } while (test > 1.e-3 && k < 100);
      // Test training set
      nerror=0;
      for (i=0;i<M;i++) {
        nerror += ((y[i]==1.0) != (polysvm2.predict(i) >= 0.0) ? 1 : 0);
      }
      System.out.printf("%d ",nerror);
      // Test new data
      nerror=0;
      for (j=0;j<4;j++) {   // Four quadrants
        for (i=0;i<M/4;i++) {
          if (j == 0) {
            yy=1.0;
            x[0]=1.0+ndev.dev();
            x[1]=1.0+ndev.dev();
          } else if (j == 1) {
            yy=-1.0;
            x[0]=-1.0+ndev.dev();
            x[1]=1.0+ndev.dev();
          } else if (j == 2) {
            yy=1.0;
            x[0]=-1.0+ndev.dev();
            x[1]=-1.0+ndev.dev();
          } else {
            yy=-1.0;
            x[0]=1.0+ndev.dev();
            x[1]=-1.0+ndev.dev();
          }
          nerror += ((yy==1.0) != (polysvm2.predict(x) >= 0.0) ? 1 : 0);
        }
      }
      System.out.printf("%d    ",nerror);
    }
    System.out.println();

    // Gaussian kernel
    Svmgausskernel gausskernel2=new Svmgausskernel(data,y,1.0);
    Svm gausssvm2=new Svm(gausskernel2);
    System.out.printf("Errors: ");
    for (lambda=0.001;lambda<10000;lambda *= 10) {
      k=0;
      do {
        test=gausssvm2.relax(lambda,omega);
//        System.out.printf("%f\n",test);
        k++;
      } while (test > 1.e-3 && k < 100);
      nerror=0;
      for (i=0;i<M;i++) {
        nerror += ((y[i]==1.0) != (gausssvm2.predict(i) >= 0.0) ? 1 : 0);
      }
      System.out.printf("%d ",nerror);
      // Test new data
      nerror=0;
      for (j=0;j<4;j++) {   // Four quadrants
        for (i=0;i<M/4;i++) {
          if (j == 0) {
            yy=1.0;
            x[0]=1.0+ndev.dev();
            x[1]=1.0+ndev.dev();
          } else if (j == 1) {
            yy=-1.0;
            x[0]=-1.0+ndev.dev();
            x[1]=1.0+ndev.dev();
          } else if (j == 2) {
            yy=1.0;
            x[0]=-1.0+ndev.dev();
            x[1]=-1.0+ndev.dev();
          } else {
            yy=-1.0;
            x[0]=1.0+ndev.dev();
            x[1]=-1.0+ndev.dev();
          }
          nerror += ((yy==1.0) != (gausssvm2.predict(x) >= 0.0) ? 1 : 0);
        }
      }
      System.out.printf("%d    ",nerror);
    }
    System.out.println();
View Full Code Here

Examples of org.encog.ml.svm.SVM

  public final File SERIAL_FILENAME = TEMP_DIR.createFile("encogtest.ser");
 
  private SVM create()
  {
    MLDataSet training = new BasicMLDataSet(XOR.XOR_INPUT,XOR.XOR_IDEAL);
    SVM result = new SVM(2,SVMType.EpsilonSupportVectorRegression,KernelType.RadialBasisFunction);
    final SVMTrain train = new SVMTrain(result, training);
    train.iteration();
    return result;
  }
View Full Code Here

Examples of org.encog.ml.svm.SVM

    return result;
  }
 
  public void testPersistEG()
  {
    SVM network = create();

    EncogDirectoryPersistence.saveObject(EG_FILENAME, network);
    SVM network2 = (SVM)EncogDirectoryPersistence.loadObject(EG_FILENAME);
    validate(network2);
  }
View Full Code Here
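Neither EG_FILENAME nor validate() appears in these excerpts. A plausible sketch of both, assuming the XOR setup from create() above (the file name and the tolerance are made up; org.encog.ml.data.* and org.junit.Assert imports are assumed):

  public final File EG_FILENAME = TEMP_DIR.createFile("encogtest.eg");  // hypothetical, mirrors SERIAL_FILENAME

  private void validate(SVM network)
  {
    // Re-run the trained SVM over the XOR set; the epsilon-SVR output should
    // track the ideal values to within a loose tolerance.
    MLDataSet training = new BasicMLDataSet(XOR.XOR_INPUT,XOR.XOR_IDEAL);
    for (MLDataPair pair : training) {
      double actual = network.compute(pair.getInput()).getData(0);
      double ideal = pair.getIdeal().getData(0);
      Assert.assertEquals(ideal, actual, 0.2);
    }
  }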

Examples of org.encog.ml.svm.SVM

    validate(network2);
  }
 
  public void testPersistSerial() throws IOException, ClassNotFoundException
  {
    SVM network = create();
   
    SerializeObject.save(SERIAL_FILENAME, network);
    SVM network2 = (SVM)SerializeObject.load(SERIAL_FILENAME);
       
    validate(network2);
  }
View Full Code Here

Examples of org.encog.ml.svm.SVM

  @Override
  public final MLMethod generate() {
    if (this.outputNeurons != 1) {
      throw new PatternError("A SVM may only have one output.");
    }
    final SVM network = new SVM(this.inputNeurons, this.svmType,
        this.kernelType);
    return network;
  }
View Full Code Here
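The generate() method above looks like it belongs to Encog's SVMPattern (org.encog.neural.pattern.SVMPattern); a brief usage sketch under that assumption:

  SVMPattern pattern = new SVMPattern();
  pattern.setInputNeurons(2);
  pattern.setOutputNeurons(1);            // anything other than 1 throws PatternError
  SVM network = (SVM)pattern.generate();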

Examples of org.encog.ml.svm.SVM

    return temp.process(this.normalizedSunspots);
  }
 
  public SVM createNetwork()
  {
    SVM network = new SVM(WINDOW_SIZE,true);
    return network;
  }
View Full Code Here
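A note on the two-argument constructor used above: in Encog 3.x the boolean selects regression, so SVM(WINDOW_SIZE,true) should be roughly equivalent to the explicit form:

  SVM network = new SVM(WINDOW_SIZE, SVMType.EpsilonSupportVectorRegression,
      KernelType.RadialBasisFunction);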

Examples of org.encog.ml.svm.SVM

  }
 
  public void run()
  {
    normalizeSunspots(0.1,0.9);
    SVM network = createNetwork();
    MLDataSet training = generateTraining();
    train(network,training);
    predict(network);
   
  }
View Full Code Here
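The train() helper called above is not shown in these excerpts. A plausible sketch built on SVMTrain, mirroring the earlier create() example (the error printout is illustrative):

  public void train(SVM network, MLDataSet training)
  {
    // SVMTrain wraps libsvm; a single iteration performs the whole training pass.
    final SVMTrain train = new SVMTrain(network, training);
    train.iteration();
    System.out.println("Training error: " + train.getError());
  }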

Examples of org.encog.ml.svm.SVM

  public void finishTraining() {
    stop();
  }

  private SVM generateSVM() {
    final SVM svm = new SVM(this.modelSVM.getInputCount(),
        this.modelSVM.getSVMType(), this.modelSVM.getKernelType());
    return svm;
  }
View Full Code Here

Examples of org.encog.ml.svm.SVM

   * This method creates, and trains, a SVM with the best const and gamma.
   * @return The best SVM.
   */
  @Override
  public MLMethod getMethod() {
    final SVM result = generateSVM();
    result.getParams().C = this.bestConst;
    result.getParams().gamma = this.bestGamma;
    result.setModel(svm.svm_train(this.problem, result.getParams()));
    return result;
  }
View Full Code Here
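getMethod() above, together with requestNextTask() in the next excerpt, appears to come from a grid-searching trainer such as Encog's SVMSearchTrain, which scans C ("const") and gamma and keeps the best pair. A hedged usage sketch under that assumption, reusing the XOR data from the earlier create() example:

  MLDataSet training = new BasicMLDataSet(XOR.XOR_INPUT,XOR.XOR_IDEAL);
  SVM svm = new SVM(2,SVMType.EpsilonSupportVectorRegression,KernelType.RadialBasisFunction);
  SVMSearchTrain search = new SVMSearchTrain(svm, training);
  do {
    search.iteration();
  } while (!search.isTrainingDone());
  search.finishTraining();
  SVM best = (SVM)search.getMethod();     // SVM rebuilt with the best C and gamma found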

Examples of org.encog.ml.svm.SVM

  public Object requestNextTask() {
    if (this.done || getShouldStop()) {
      return null;
    }

    final SVM svm = generateSVM();

    // advance
    this.currentConst += this.constStep;
    if (this.currentConst > this.constEnd) {
      this.currentConst = this.constBegin;
View Full Code Here