Package cc.mallet.fst

Examples of cc.mallet.fst.CRFTrainerByLabelLikelihood
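
The excerpts on this page come from MALLET's test and demo code, so most of them begin mid-method. For orientation, here is a minimal, self-contained sketch of the usual pattern: pipe the data into an InstanceList, build a CRF with fully connected label states, and train it with CRFTrainerByLabelLikelihood. The pipe choice, the file name "train.txt", and the hyperparameters are illustrative assumptions, not taken from the excerpts below.

    import java.io.FileReader;
    import java.util.regex.Pattern;

    import cc.mallet.fst.CRF;
    import cc.mallet.fst.CRFTrainerByLabelLikelihood;
    import cc.mallet.fst.SimpleTagger.SimpleTaggerSentence2FeatureVectorSequence;
    import cc.mallet.pipe.Pipe;
    import cc.mallet.pipe.iterator.LineGroupIterator;
    import cc.mallet.types.InstanceList;

    public class TrainCrfSketch {
      public static void main(String[] args) throws Exception {
        // "train.txt" is a placeholder: one token per line ("feat1 feat2 ... LABEL"),
        // with blank lines separating sequences, as SimpleTagger expects.
        Pipe pipe = new SimpleTaggerSentence2FeatureVectorSequence();
        InstanceList training = new InstanceList(pipe);
        training.addThruPipe(new LineGroupIterator(
            new FileReader("train.txt"), Pattern.compile("^\\s*$"), true));

        // A first-order CRF whose states are fully connected label-to-label.
        CRF crf = new CRF(pipe, null);
        crf.addFullyConnectedStatesForLabels();

        // Maximize the Gaussian-regularized conditional log-likelihood (L-BFGS inside).
        CRFTrainerByLabelLikelihood trainer = new CRFTrainerByLabelLikelihood(crf);
        trainer.setGaussianPriorVariance(10.0);
        trainer.train(training, 100);  // at most 100 optimizer iterations

        System.out.println("Training token accuracy: "
            + crf.averageTokenAccuracy(training));
      }
    }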


     InstanceList testing = new InstanceList (pipe);
     testing.addThruPipe (new ArrayIterator (data1));

     CRF crf = new CRF (pipe, null);
     crf.addFullyConnectedStatesForLabels ();
     CRFTrainerByLabelLikelihood crft = new CRFTrainerByLabelLikelihood (crf);
     crft.trainIncremental (training);

     CRFExtractor extor = TestLatticeViewer.hackCrfExtor (crf);
     Extraction extraction = extor.extract (new ArrayIterator (data1));

     if (!outputDir.exists ()) outputDir.mkdir ();


    InstanceList testing = new InstanceList (pipe);
    testing.addThruPipe (new ArrayIterator (data1));

    CRF crf = new CRF (pipe, null);
    crf.addFullyConnectedStatesForLabels ();
    CRFTrainerByLabelLikelihood crft = new CRFTrainerByLabelLikelihood (crf);
    crft.trainIncremental (training);

    CRFExtractor extor = hackCrfExtor (crf);
    Extraction extraction = extor.extract (new ArrayIterator (data1));

    PrintStream out = new PrintStream (new FileOutputStream (htmlFile));

    InstanceList testing = new InstanceList (pipe);
    testing.addThruPipe (new ArrayIterator (data1));

    CRF crf = new CRF (pipe, null);
    crf.addFullyConnectedStatesForLabels ();
    CRFTrainerByLabelLikelihood crft = new CRFTrainerByLabelLikelihood (crf);
    TokenAccuracyEvaluator eval = new TokenAccuracyEvaluator (new InstanceList[] {training, testing}, new String[] {"Training", "Testing"});
    for (int i = 0; i < 5; i++) {  // one training iteration at a time, evaluating after each
      crft.train (training, 1);
      eval.evaluate(crft);
    }

    CRFExtractor extor = hackCrfExtor (crf);
    Extraction e1 = extor.extract (new ArrayIterator (data1));
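
None of these excerpts show applying the trained model to new sequences. A hedged sketch of decoding with Transducer.transduce, assuming crf has already been trained and testing was built through the same pipe, as in the snippets above:

    import cc.mallet.fst.CRF;
    import cc.mallet.types.Instance;
    import cc.mallet.types.InstanceList;
    import cc.mallet.types.Sequence;

    public class DecodeSketch {
      // Print the best (Viterbi) label sequence for each test instance.
      static void printPredictions(CRF crf, InstanceList testing) {
        for (Instance instance : testing) {
          Sequence input = (Sequence) instance.getData();
          Sequence output = crf.transduce(input);
          StringBuilder line = new StringBuilder();
          for (int i = 0; i < output.size(); i++)
            line.append(output.get(i)).append(' ');
          System.out.println(line.toString().trim());
        }
      }
    }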

    System.err.println(unlabeledSet.size());
    if (initSupervised) {
     
      // train supervised
      if (numThreads == 1) {
        CRFTrainerByLabelLikelihood trainer = new CRFTrainerByLabelLikelihood(crf);
        trainer.setAddNoFactors(true);
        trainer.setGaussianPriorVariance(gpv);
        trainer.train(trainingSet,supIterations);
      }
      else {
        CRFTrainerByThreadedLabelLikelihood trainer = new CRFTrainerByThreadedLabelLikelihood(crf,numThreads);
        trainer.setAddNoFactors(true);
        trainer.setGaussianPriorVariance(gpv);
        trainer.train(trainingSet,supIterations);
        trainer.shutdown();
      }
      runEvaluators();
    }
   
    // train semi-supervised

    CRF crf = new CRF(inputAlphabet, outputAlphabet);
    String[] stateNames = new String[numStates];
    for (int i = 0; i < numStates; i++)
      stateNames[i] = "state" + i;
    crf.addFullyConnectedStates(stateNames);
    CRFTrainerByLabelLikelihood crft = new CRFTrainerByLabelLikelihood(crf);
    Optimizable.ByGradientValue mcrf = crft
        .getOptimizableCRF(new InstanceList(null));
    TestOptimizable.testGetSetParameters(mcrf);
  }
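
Several excerpts on this page call getOptimizableCRF rather than train. It wraps the CRF and its training data as an Optimizable.ByGradientValue whose value is the penalized log-likelihood, which is what the TestOptimizable checks above exercise and what train drives internally. A sketch of optimizing that objective with an explicit L-BFGS optimizer, assuming crf and training are set up as in the other snippets:

    import cc.mallet.fst.CRF;
    import cc.mallet.fst.CRFTrainerByLabelLikelihood;
    import cc.mallet.optimize.LimitedMemoryBFGS;
    import cc.mallet.optimize.Optimizable;
    import cc.mallet.optimize.OptimizationException;
    import cc.mallet.types.InstanceList;

    public class ExplicitLbfgsSketch {
      static void optimize(CRF crf, InstanceList training) {
        CRFTrainerByLabelLikelihood crft = new CRFTrainerByLabelLikelihood(crf);
        Optimizable.ByGradientValue objective = crft.getOptimizableCRF(training);
        LimitedMemoryBFGS lbfgs = new LimitedMemoryBFGS(objective);
        try {
          lbfgs.optimize();  // run L-BFGS until it reports convergence
        } catch (OptimizationException e) {
          // L-BFGS can stop with a line-search failure; the weights reached so
          // far are already stored in the CRF and remain usable.
        }
        System.out.println("Objective value: " + objective.getValue());
      }
    }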

    InstanceList ilist = new InstanceList(new Noop(inputAlphabet,
        outputAlphabet));
    ilist.add(fvs, ss, null, null);

    crf.addFullyConnectedStates(stateNames);
    CRFTrainerByLabelLikelihood crft = new CRFTrainerByLabelLikelihood(crf);
    crft.setUseSparseWeights(false);

    if (useSave) {
      try {
        ObjectOutputStream oos = new ObjectOutputStream(
            new FileOutputStream(f));
        oos.writeObject(crf);
        oos.close();
      } catch (IOException e) {
        System.err.println("Exception writing file: " + e);
      }
      System.err.println("Wrote out CRF");
      System.err.println("CRF parameters. hyperbolicPriorSlope: "
          + crft.getUseHyperbolicPriorSlope()
          + ". hyperbolicPriorSharpness: "
          + crft.getUseHyperbolicPriorSharpness()
          + ". gaussianPriorVariance: "
          + crft.getGaussianPriorVariance());
      // And read it back in
      crf = null;
      try {
        ObjectInputStream ois = new ObjectInputStream(
            new FileInputStream(f));
        crf = (CRF) ois.readObject();
        ois.close();
      } catch (IOException e) {
        System.err.println("Exception reading file: " + e);
      } catch (ClassNotFoundException cnfe) {
        System.err.println("Cound not find class reading in object: "
            + cnfe);
      }
      System.err.println("Read in CRF.");
      System.err.println("CRF parameters. hyperbolicPriorSlope: "
          + crft.getUseHyperbolicPriorSlope()
          + ". hyperbolicPriorSharpness: "
          + crft.getUseHyperbolicPriorSharpness()
          + ". gaussianPriorVariance: "
          + crft.getGaussianPriorVariance());

      try {
        ObjectOutputStream oos = new ObjectOutputStream(
            new FileOutputStream(f2));
        oos.writeObject(crf);
        oos.close();
      } catch (IOException e) {
        System.err.println("Exception writing file: " + e);
      }
      System.err.println("Wrote out CRF");
      crf = saveCRF;
    }
    Optimizable.ByGradientValue mcrf = crft.getOptimizableCRF(ilist);
    double unconstrainedWeight = new SumLatticeDefault(crf, fvs)
        .getTotalWeight();
    double constrainedWeight = new SumLatticeDefault(crf, fvs, ss)
        .getTotalWeight();
    double optimizableValue = 0, gradientNorm = 0;

    instances.addThruPipe(new ArrayIterator(data));
    InstanceList[] lists = instances.split(new Random(1), new double[] {
        .5, .5 });
    CRF crf = new CRF(p, p2);
    crf.addFullyConnectedStatesForLabels();
    CRFTrainerByLabelLikelihood crft = new CRFTrainerByLabelLikelihood(crf);
    if (testValueAndGradient) {
      Optimizable.ByGradientValue optable = crft
          .getOptimizableCRF(lists[0]);
      // TestOptimizable.testValueAndGradient(minable);
      double[] gradient = new double[optable.getNumParameters()];
      optable.getValueGradient(gradient);
      // TestOptimizable.testValueAndGradientInDirection(optable,
      // gradient);
      // TestOptimizable.testValueAndGradientCurrentParameters(optable);
      // This tests at the current parameters and at parameters perturbed
      // toward the gradient.
      TestOptimizable.testValueAndGradient(optable);
    } else {
      System.out.println("Training Accuracy before training = "
          + crf.averageTokenAccuracy(lists[0]));
      System.out.println("Testing  Accuracy before training = "
          + crf.averageTokenAccuracy(lists[1]));
      System.out.println("Training...");
      crft.trainIncremental(lists[0]);
      System.out.println("Training Accuracy after training = "
          + crf.averageTokenAccuracy(lists[0]));
      System.out.println("Testing  Accuracy after training = "
          + crf.averageTokenAccuracy(lists[1]));
      System.out.println("Training results:");

    InstanceList instances = new InstanceList(p);
    instances.addThruPipe(new ArrayIterator(data));
    InstanceList[] lists = instances.split(new double[] { .5, .5 });
    CRF crf = new CRF(p.getDataAlphabet(), p.getTargetAlphabet());
    crf.addFullyConnectedStatesForLabels();
    CRFTrainerByLabelLikelihood crft = new CRFTrainerByLabelLikelihood(crf);
    crft.setUseSparseWeights(useSparseWeights);
    if (testValueAndGradient) {
      Optimizable.ByGradientValue minable = crft
          .getOptimizableCRF(lists[0]);
      TestOptimizable.testValueAndGradient(minable);
    } else {
      System.out.println("Training Accuracy before training = "
          + crf.averageTokenAccuracy(lists[0]));
      System.out.println("Testing  Accuracy before training = "
          + crf.averageTokenAccuracy(lists[1]));
      savedCRF = crf;
      System.out.println("Training serialized crf.");
      crft.trainIncremental(lists[0]);
      double preTrainAcc = crf.averageTokenAccuracy(lists[0]);
      double preTestAcc = crf.averageTokenAccuracy(lists[1]);
      System.out.println("Training Accuracy after training = "
          + preTrainAcc);
      System.out.println("Testing  Accuracy after training = "
          + preTestAcc);
      try {
        ObjectOutputStream oos = new ObjectOutputStream(
            new FileOutputStream(f));
        oos.writeObject(crf);
        oos.close();
      } catch (IOException e) {
        System.err.println("Exception writing file: " + e);
      }
      System.err.println("Wrote out CRF");
      System.err.println("CRF parameters. hyperbolicPriorSlope: "
          + crft.getUseHyperbolicPriorSlope()
          + ". hyperbolicPriorSharpness: "
          + crft.getUseHyperbolicPriorSharpness()
          + ". gaussianPriorVariance: "
          + crft.getGaussianPriorVariance());
      // And read it back in
      if (useSaved) {
        crf = null;
        try {
          ObjectInputStream ois = new ObjectInputStream(
View Full Code Here

    // Check that having more features leads to a higher training likelihood:

    CRF crf1 = new CRF(p.getDataAlphabet(), p.getTargetAlphabet());
    crf1.addOrderNStates(lists[0], new int[] { 1, },
        new boolean[] { false, }, "START", null, null, false);
    new CRFTrainerByLabelLikelihood(crf1).trainIncremental(lists[0]);

    CRF crf2 = new CRF(p.getDataAlphabet(), p.getTargetAlphabet());
    crf2.addOrderNStates(lists[0], new int[] { 1, 2, }, new boolean[] {
        false, true }, "START", null, null, false);
    new CRFTrainerByLabelLikelihood(crf2).trainIncremental(lists[0]);

    CRF crf3 = new CRF(p.getDataAlphabet(), p.getTargetAlphabet());
    crf3.addOrderNStates(lists[0], new int[] { 1, 2, }, new boolean[] {
        false, false }, "START", null, null, false);
    new CRFTrainerByLabelLikelihood(crf3).trainIncremental(lists[0]);

    // Prevent cached values
    double lik1 = getLikelihood(crf1, lists[0]);
    double lik2 = getLikelihood(crf2, lists[0]);
    double lik3 = getLikelihood(crf3, lists[0]);

    assertEquals(-165.81326484466342, lik2, 0.0001);
    assertEquals(-90.37680146432787, lik3, 0.0001);
  }

  double getLikelihood(CRF crf, InstanceList data) {
    CRFTrainerByLabelLikelihood crft = new CRFTrainerByLabelLikelihood(crf);
    Optimizable.ByGradientValue mcrf = crft.getOptimizableCRF(data);
    // Do this elaborate thing so that crf.cachedValueStale is forced true
    double[] params = new double[mcrf.getNumParameters()];
    mcrf.getParameters(params);
    mcrf.setParameters(params);
    return mcrf.getValue();
