Package cc.mallet.types

Examples of cc.mallet.types.FeatureVectorSequence$Iterator
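
Before the excerpts, a minimal sketch of the iterator itself: building a small FeatureVectorSequence and stepping through its per-token FeatureVectors with the inner Iterator. The class name FeatureVectorSequenceIteratorSketch and the feature names are invented for illustration, and iterator() is assumed to behave as a plain java.util.Iterator over the sequence, as in the MALLET source.

import cc.mallet.types.Alphabet;
import cc.mallet.types.FeatureVector;
import cc.mallet.types.FeatureVectorSequence;

public class FeatureVectorSequenceIteratorSketch {
  public static void main (String[] args) {
    Alphabet features = new Alphabet ();

    // Two "tokens", each described by a handful of binary features.
    int[] token0 = { features.lookupIndex ("word=the"), features.lookupIndex ("shape=lower") };
    int[] token1 = { features.lookupIndex ("word=Dog"), features.lookupIndex ("shape=capitalized") };

    FeatureVector[] fvs = {
      new FeatureVector (features, token0),
      new FeatureVector (features, token1)
    };
    FeatureVectorSequence sequence = new FeatureVectorSequence (fvs);

    // FeatureVectorSequence.Iterator walks the per-token feature vectors in order.
    FeatureVectorSequence.Iterator it = sequence.iterator ();
    while (it.hasNext ()) {
      FeatureVector fv = (FeatureVector) it.next ();
      for (int loc = 0; loc < fv.numLocations (); loc++)
        System.out.println (features.lookupObject (fv.indexAtLocation (loc)));
      System.out.println ("--");
    }
  }
}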


        // For each training instance that has constraints, fetch its
        // FeatureVectorSequence and the lattice cached for it.
        for (int i : lattices.keySet()) {
          // skip if the instance doesn't have any constraints
          if (!constraintBits.get(i)) {
            continue;
          }
          FeatureVectorSequence fvs = (FeatureVectorSequence) ilist.get(i).getData();
          lattice = lattices.get(i);
          assert(lattice != null)
              : "Lattice is null:: " + i + ", size: " + lattices.size();

          // update the number of times this feature occurred in the sequence
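
As a hedged follow-up to the fragment above (which is about to count feature occurrences in the sequence), this sketch tallies the active feature firings in one instance's FeatureVectorSequence; it assumes the getFeatureVector(int) accessor and the sparse numLocations() idiom, with ilist and i as in the fragment.

// Hedged sketch: count non-zero feature firings across one instance's sequence.
FeatureVectorSequence fvs = (FeatureVectorSequence) ilist.get(i).getData();
int activeFeatures = 0;
for (int t = 0; t < fvs.size(); t++)
  activeFeatures += fvs.getFeatureVector(t).numLocations();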


          featureIndicesArr[index] = featureIndices.get(index);
        }
        // Wrap this token's feature indices in an (augmentable) feature vector.
        fvs[l] = featureInductionOption.value
            ? new AugmentableFeatureVector(features, featureIndicesArr, null, featureIndicesArr.length)
            : new FeatureVector(features, featureIndicesArr);
      }
      carrier.setData(new FeatureVectorSequence(fvs));
      if (isTargetProcessing())
        carrier.setTarget(target);
      else
        carrier.setTarget(new LabelSequence(getTargetAlphabet()));
      return carrier;
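
For context, a hedged sketch of consuming what this pipe produces: each processed Instance carries a FeatureVectorSequence as its data. The names pipe and rawInstance are hypothetical placeholders.

// Hedged sketch: inspect the output of the pipe above.
Instance processed = pipe.pipe(rawInstance);   // pipe, rawInstance: hypothetical
FeatureVectorSequence data = (FeatureVectorSequence) processed.getData();
System.out.println("tokens in sequence: " + data.size());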

  // Accumulate model expectations for the entropy-regularization term,
  // running forward-backward (SumLatticeDefault) over each instance.
  public void computeExpectations() {
    expectations.zero();

    // now, update the expectations due to each instance for entropy reg.
    for (int ii = 0; ii < data.size(); ii++) {
      FeatureVectorSequence input = (FeatureVectorSequence) data.get(ii).getData();
      SumLattice lattice = new SumLatticeDefault(crf,input, true);

      // update the expectations
      EntropyLattice entropyLattice = new EntropyLattice(
          input, lattice.getGammas(), lattice.getXis(), crf,
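
A hedged sketch of the forward-backward call pattern the loop above relies on, mirroring its own SumLatticeDefault usage; crf and instance are assumed to already exist.

// Hedged sketch: run forward-backward over one instance's FeatureVectorSequence.
FeatureVectorSequence input = (FeatureVectorSequence) instance.getData();
SumLattice lattice = new SumLatticeDefault(crf, input, true);  // true: also keep the xis
double logZ = lattice.getTotalWeight();    // (log) normalizer for this sequence
double[][] gammas = lattice.getGammas();   // per-position state marginals from the lattice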

      for (int j = parameters.weights[i].numLocations()-1; j >= 0; j--)
        weightsPresent[i].set (parameters.weights[i].indexAtLocation(j));
    // Put in the weights that occur in the training set
    for (int i = 0; i < trainingData.size(); i++) {
      Instance instance = trainingData.get(i);
      FeatureVectorSequence input = (FeatureVectorSequence) instance.getData();
      FeatureSequence output = (FeatureSequence) instance.getTarget();
      // gsc: trainingData can have unlabeled instances as well
      if (output != null && output.size() > 0) {
        // Do it for the paths consistent with the labels...
        sumLatticeFactory.newSumLattice (this, input, output, new Transducer.Incrementor() {
          public void incrementTransition (Transducer.TransitionIterator ti, double count) {
            State source = (CRF.State)ti.getSourceState();
            FeatureVector input = (FeatureVector)ti.getInput();
            int index = ti.getIndex();
            int nwi = source.weightsIndices[index].length;
            for (int wi = 0; wi < nwi; wi++) {
              int weightsIndex = source.weightsIndices[index][wi];
              for (int i = 0; i < input.numLocations(); i++) {
                int featureIndex = input.indexAtLocation(i);
                if ((globalFeatureSelection == null || globalFeatureSelection.contains(featureIndex))
                    && (featureSelections == null
                        || featureSelections[weightsIndex] == null
                        || featureSelections[weightsIndex].contains(featureIndex)))
                  weightsPresent[weightsIndex].set (featureIndex);
              }
            }
          }
          public void incrementInitialState (Transducer.State s, double count) {  }
          public void incrementFinalState (Transducer.State s, double count) {  }
        });
      }
      // ...and also do it for the paths selected by the current model (so we will get some negative weights)
      if (useSomeUnsupportedTrick && this.getParametersAbsNorm() > 0) {
        if (i == 0)
          logger.info ("CRF: Incremental training detected.  Adding weights for some unsupported features...");
        // (do this once some training is done)
        sumLatticeFactory.newSumLattice (this, input, null, new Transducer.Incrementor() {
          public void incrementTransition (Transducer.TransitionIterator ti, double count) {
            if (count < 0.2) // Only create features for transitions with probability above 0.2
              return; // This 0.2 is somewhat arbitrary -akm
            State source = (CRF.State)ti.getSourceState();
            FeatureVector input = (FeatureVector)ti.getInput();
            int index = ti.getIndex();
            int nwi = source.weightsIndices[index].length;
            for (int wi = 0; wi < nwi; wi++) {
              int weightsIndex = source.weightsIndices[index][wi];
              for (int i = 0; i < input.numLocations(); i++) {
                int featureIndex = input.indexAtLocation(i);
                if ((globalFeatureSelection == null || globalFeatureSelection.contains(featureIndex))
                    && (featureSelections == null
                        || featureSelections[weightsIndex] == null
                        || featureSelections[weightsIndex].contains(featureIndex)))
                  weightsPresent[weightsIndex].set (featureIndex);
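
The excerpt above repeats one idiom: walk a FeatureVector's non-zero locations and mark the corresponding feature indices in a BitSet of "present" weights. A minimal, hedged sketch of just that idiom (class and method names are illustrative):

import java.util.BitSet;
import cc.mallet.types.FeatureVector;

final class WeightsPresentSketch {
  // Record which global feature indices fire in a single FeatureVector.
  static BitSet presentFeatures (FeatureVector fv) {
    BitSet present = new BitSet ();
    for (int loc = 0; loc < fv.numLocations (); loc++)
      present.set (fv.indexAtLocation (loc));
    return present;
  }
}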


