Package de.bwaldvogel.liblinear

Examples of de.bwaldvogel.liblinear.FeatureNode
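FeatureNode is liblinear's sparse feature representation: a single (index, value) pair, where indices are 1-based and each instance's array is given in ascending index order. Before the project snippets below, here is a minimal, self-contained sketch (assuming liblinear-java 1.9 or later, where Problem.y is a double[]; the class name and data are illustrative only) that builds a small Problem from FeatureNode arrays, trains a model, and predicts a label:

import de.bwaldvogel.liblinear.*;

public class FeatureNodeExample {
  public static void main(String[] args) {
    // Two sparse training instances; indices are 1-based and ascending.
    FeatureNode[][] x = {
        { new FeatureNode(1, 0.5), new FeatureNode(3, 1.0) },
        { new FeatureNode(2, 2.0) }
    };
    double[] y = { 1, 2 };                 // one class label per instance

    Problem problem = new Problem();
    problem.l = x.length;                  // number of training instances
    problem.n = 3;                         // highest feature index in use
    problem.x = x;
    problem.y = y;
    problem.bias = -1;                     // no bias feature appended

    Parameter parameter = new Parameter(SolverType.L2R_L2LOSS_SVC, 1.0, 0.01);
    Model model = Linear.train(problem, parameter);

    FeatureNode[] test = { new FeatureNode(1, 0.4), new FeatureNode(3, 0.9) };
    System.out.println("predicted label: " + Linear.predict(model, test));
  }
}

The snippets that follow show how real projects assemble FeatureNode arrays from their own feature representations.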


      i++;
    }
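    // Copy the collected feature set into a FeatureNode array and, when only the single best label is needed, predict it with Linear.predict.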
    FeatureNode[] xarray = new FeatureNode[featureSet.size()];
    int k = 0;
    for (XNode x : featureSet) {
      xarray[k++] = new FeatureNode(x.getIndex(), x.getValue());
    }


    if (decision.getKBestList().getK() == 1) {
      decision.getKBestList().add((int) Linear.predict(model, xarray));


            }
          }
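          // Fill row i of the liblinear Problem with FeatureNodes built from the feature set read from the instance file.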
          problem.x[i] = new FeatureNode[featureSet.size()];
          int p = 0;
          for (XNode x : featureSet) {
            problem.x[i][p++] = new FeatureNode(x.getIndex(), x.getValue());
          }
          featureSet.clear();
          i++;
        } catch (ArrayIndexOutOfBoundsException e) {
          throw new LiblinearException("Cannot read from the instance file. ", e);

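          // Parse pipe-separated integer feature indices; each valid index (offset applied) becomes a FeatureNode with value 1.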
          for(j = 1; j < columns.length; j++) {
            final String[] items = pipePattern.split(columns[j]);
            for (int k = 0; k < items.length; k++) {
              try {
                if (Integer.parseInt(items[k]) != -1) {
                  xlist.add(p, new FeatureNode(Integer.parseInt(items[k])+offset, 1));
                  p++;
                }
              } catch (NumberFormatException e) {
                throw new LiblinearException("The instance file contains a non-integer value '"+items[k]+"'", e);
              }

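          // Copy the label and the MaltFeatureNode list into row i of the liblinear Problem.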
          problem.y[i] = y;
          problem.x[i] = new FeatureNode[featureList.size()];
          int p = 0;
          for (int k = 0; k < featureList.size(); k++) {
            MaltFeatureNode x = featureList.get(k);
            problem.x[i][p++] = new FeatureNode(x.getIndex(), x.getValue());
          }
          i++;
        } catch (ArrayIndexOutOfBoundsException e) {
          throw new LibException("Couldn't read liblinear problem from the instance file. ", e);
        }

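    // Convert a sparse vector into FeatureNodes, keeping only indices within the model's feature range
    // and appending a bias node when the model was trained with one.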
    double[] values = vector.getValues();
   
    for (int indexpos = 0; indexpos < indices.length; indexpos++) {
      int index = indices[indexpos];
      if (index <= nr_feature) {
        FeatureNode node = new FeatureNode(index, values[indexpos]);
        x.add(node);
      }
    }
   
    if (liblinearModel.getBias() >= 0) {
      FeatureNode node = new FeatureNode(n, liblinearModel.getBias());
      x.add(node);
    }
   
    FeatureNode[] nodes = new FeatureNode[x.size()];
    nodes = x.toArray(nodes);

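      // Build one FeatureNode row per feature vector, reserving the last slot for the bias node when bias >= 0.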
      Set<Integer> indices = featureVectors[i].getIndices();
      nodes[i] = new FeatureNode[(bias >= 0) ? indices.size() + 1 : indices.size()];

      int j = 0;
      for (int index : indices) {
        nodes[i][j] = new FeatureNode(index, featureVectors[i].getValue(index));
        j++;
      }
      if (bias >= 0) {
        nodes[i][j] = new FeatureNode(numberOfFeatures, bias);
      }
    } 
    return nodes; 
  }

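    // Populate prob.x from the feature vectors, track the highest feature index, and append a bias node to every row.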
    for (int i = 0; i < featureVectors.length; i++) {
      Set<Integer> indices = featureVectors[i].getIndices();
      prob.x[i] = new FeatureNode[(bias >= 0) ? indices.size() + 1 : indices.size()];
      int j = 0;
      for (int index : indices) {
        prob.x[i][j] = new FeatureNode(index, featureVectors[i].getValue(index));   
        maxIndex = Math.max(maxIndex, index);
        j++;
      }
    }

    if (bias >= 0) {
      maxIndex++;
      for (int i = 0; i < featureVectors.length; i++) {
        prob.x[i][prob.x[i].length - 1] = new FeatureNode(maxIndex, bias);
      }
    }

    prob.n    = maxIndex;
    prob.bias = (bias >= 0) ? 1 : -1;


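            // Convert a sparse instance into 1-based FeatureNodes, skipping the class attribute and zero values, then append the bias term.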
            double val = instance.valueSparse(i);

            if (idx == instance.classIndex()) continue;
            if (val == 0) continue;

            nodes[index] = new FeatureNode(idx + 1, val);
            index++;
        }

        // add bias term
        if (m_Bias >= 0) {
            nodes[index] = new FeatureNode(instance.numAttributes() + 1, m_Bias);
        }

        return nodes;
    }
