Package org.apache.lucene.analysis.tokenattributes

Examples of org.apache.lucene.analysis.tokenattributes.TermAttribute.termLength()
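TermAttribute exposes the current token's text as a reusable character buffer: termBuffer() returns the buffer and termLength() reports how many of its leading characters are valid for the current token, so consumers copy exactly that many characters (or wrap them in a String). The snippets below all follow that pattern. Here is a minimal, self-contained sketch of it, assuming a Lucene 3.0.x classpath where TermAttribute is still available (later releases deprecate it in favor of CharTermAttribute); StandardAnalyzer, the field name "content", and the class and method names are illustrative choices, not taken from the snippets below.

import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.apache.lucene.util.Version;

public class TermLengthSketch {

  // Tokenizes the given text and returns each term as a String.
  public static List<String> tokenize(String text) throws IOException {
    Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_30); // illustrative analyzer choice
    TokenStream ts = analyzer.tokenStream("content", new StringReader(text));
    TermAttribute termAtt = ts.addAttribute(TermAttribute.class);
    List<String> terms = new ArrayList<String>();
    ts.reset();
    while (ts.incrementToken()) {
      // termBuffer() is reused between tokens and may be larger than the term;
      // termLength() bounds the valid region, so copy exactly that many chars.
      terms.add(new String(termAtt.termBuffer(), 0, termAtt.termLength()));
    }
    ts.end();
    ts.close();
    return terms;
  }
}

Calling reset(), end(), and close() around the incrementToken() loop follows the standard TokenStream consumer workflow that the longer excerpts below also use.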


      writer.write('\t'); // tab separator, to match Hadoop's standard TextInputFormat
      TermAttribute termAtt = ts.addAttribute(TermAttribute.class);
      while (ts.incrementToken()) {
        // only the first termLength() chars of the reused buffer hold the current term
        char[] termBuffer = termAtt.termBuffer();
        int termLen = termAtt.termLength();
        writer.write(termBuffer, 0, termLen);
        writer.write(' ');
      }
    } finally {
      IOUtils.quietClose(reader);

    List<String> coll = new ArrayList<String>();
    TermAttribute termAtt = ts.addAttribute(TermAttribute.class);
    while (ts.incrementToken()) {
      char[] termBuffer = termAtt.termBuffer();
      int termLen = termAtt.termLength();
      String val = new String(termBuffer, 0, termLen);
      coll.add(val);
    }
    return coll.toArray(new String[coll.size()]);
  }

        TokenStream stream = analyzer.reusableTokenStream( fieldName, reader);
        TermAttribute attribute = (TermAttribute) stream.addAttribute( TermAttribute.class );
        stream.reset();

        while ( stream.incrementToken() ) {
          if ( attribute.termLength() > 0 ) {
            String term = attribute.term();
            terms.add( term );
          }
        }
        stream.end();

                                    PayloadAttribute payloadAttribute = tokenStream.addAttribute(PayloadAttribute.class);
                                    tokenStream.incrementToken();
                                    tokenStream.end();
                                    tokenStream.close();

                                    // termAttribute is presumably added to the same stream earlier in the full source;
                                    // only the first termLength() chars of its buffer are valid
                                    String value = new String(termAttribute.termBuffer(), 0, termAttribute.termLength());
                                    if (value.startsWith(namePrefix)) {
                                        // extract value
                                        String rawValue = value.substring(namePrefix.length());
                                        // create new named value
                                        Path p = getRelativePath(state, propState);

                           // old Token-based API call, replaced by the attribute-based API below:
                           //t = field.tokenStreamValue().next(t);
                           field.tokenStreamValue().incrementToken();
                           TermAttribute term = field.tokenStreamValue().getAttribute(TermAttribute.class);
                           PayloadAttribute payload = field.tokenStreamValue().getAttribute(PayloadAttribute.class);

                           String value = new String(term.termBuffer(), 0, term.termLength());

                           if (value.startsWith(namePrefix))
                           {
                              // extract value
                              value = value.substring(namePrefix.length());
