Examples of fillBytesRef()
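TermToBytesRefAttribute exposes a token's term text as raw bytes. In the Lucene 4.x API shown in the snippets below, getBytesRef() returns a single shared BytesRef instance and fillBytesRef() copies the current token's bytes into that instance after each successful incrementToken(); because the instance is reused for every token, callers must take a BytesRef.deepCopyOf() before storing the value. (In later Lucene releases fillBytesRef() was removed and getBytesRef() alone returns the up-to-date bytes.) The following sketch is a minimal, self-contained version of the pattern, not taken from any one example on this page; the field name "fake" follows the test snippets below, the class name is illustrative, and any Analyzer can be passed in.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
import org.apache.lucene.util.BytesRef;

public class FillBytesRefExample {
  // Collects a deep copy of every token's term bytes produced by the analyzer for the given text.
  public static List<BytesRef> analyze(Analyzer analyzer, String text) throws IOException {
    List<BytesRef> terms = new ArrayList<>();
    try (TokenStream ts = analyzer.tokenStream("fake", text)) {
      TermToBytesRefAttribute termAtt = ts.addAttribute(TermToBytesRefAttribute.class);
      BytesRef bytes = termAtt.getBytesRef();    // shared instance, reused for every token
      ts.reset();
      while (ts.incrementToken()) {
        termAtt.fillBytesRef();                  // refresh the shared BytesRef for this token
        terms.add(BytesRef.deepCopyOf(bytes));   // copy before storing; the instance is overwritten on the next token
      }
      ts.end();
    }
    return terms;
  }
}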


Examples of org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute.fillBytesRef()

      try (TokenStream ts = analyzer.tokenStream("fake", term)) {
        TermToBytesRefAttribute termAtt = ts.addAttribute(TermToBytesRefAttribute.class);
        BytesRef bytes = termAtt.getBytesRef();
        ts.reset();
        assertTrue(ts.incrementToken());
        termAtt.fillBytesRef();
        // ensure we make a copy of the actual bytes too
        map.put(term, BytesRef.deepCopyOf(bytes));
        assertFalse(ts.incrementToken());
        ts.end();
      }

Examples of org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute.fillBytesRef()

              try (TokenStream ts = analyzer.tokenStream("fake", term)) {
                TermToBytesRefAttribute termAtt = ts.addAttribute(TermToBytesRefAttribute.class);
                BytesRef bytes = termAtt.getBytesRef();
                ts.reset();
                assertTrue(ts.incrementToken());
                termAtt.fillBytesRef();
                assertEquals(expected, bytes);
                assertFalse(ts.incrementToken());
                ts.end();
              }
            }

Examples of org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute.fillBytesRef()

      OffsetAttribute offsetAtt = stream.addAttribute(OffsetAttribute.class);
      BytesRef ref = termAtt.getBytesRef();
      stream.reset();
     
      while (stream.incrementToken()) {
        termAtt.fillBytesRef();
//        if (DEBUG) System.err.println("token='" + term + "'");
        numTokens++;
        final int posIncr = posIncrAttribute.getPositionIncrement();
        if (posIncr == 0)
          numOverlapTokens++;

Examples of org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute.fillBytesRef()

      TermToBytesRefAttribute termAtt = source.getAttribute(TermToBytesRefAttribute.class);
      BytesRef bytes = termAtt.getBytesRef();
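      // 'bytes' is the attribute's shared, reused instance; the deepCopyOf() below returns a stable copy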

      if (!source.incrementToken())
        throw new IllegalArgumentException("analyzer returned no terms for multiTerm term: " + part);
      termAtt.fillBytesRef();
      if (source.incrementToken())
        throw new IllegalArgumentException("analyzer returned too many terms for multiTerm term: " + part);
      source.end();
      return BytesRef.deepCopyOf(bytes);
    } catch (IOException e) {

Examples of org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute.fillBytesRef()

      // last 1gram, last 2gram, etc.:
      BytesRef tokenBytes = termBytesAtt.getBytesRef();
      int maxEndOffset = -1;
      boolean sawRealToken = false;
      while(ts.incrementToken()) {
        termBytesAtt.fillBytesRef();
        sawRealToken |= tokenBytes.length > 0;
        // TODO: this is somewhat iffy; today, ShingleFilter
        // sets posLen to the gram count; maybe we should make
        // a separate dedicated att for this?
        int gramCount = posLenAtt.getPositionLength();

Examples of org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute.fillBytesRef()

        int count = 0;
        boolean changed = false;

        while(ts.incrementToken()) {
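          // fillBytesRef() refreshes the attribute's shared BytesRef (termBytes) for the current token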
          termAtt.fillBytesRef();
          if (count == 0 && !termBytes.utf8ToString().equals(s)) {
            // The value was changed during analysis.  Keep iterating so the
            // tokenStream is exhausted.
            changed = true;
          }

Examples of org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute.fillBytesRef()

      TokenStream ts = analyzer.tokenStream("fake", term);
      TermToBytesRefAttribute termAtt = ts.addAttribute(TermToBytesRefAttribute.class);
      BytesRef bytes = termAtt.getBytesRef();
      ts.reset();
      assertTrue(ts.incrementToken());
      termAtt.fillBytesRef();
      // ensure we make a copy of the actual bytes too
      map.put(term, BytesRef.deepCopyOf(bytes));
    }
   
    Thread threads[] = new Thread[numThreads];

Examples of org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute.fillBytesRef()

              TokenStream ts = analyzer.tokenStream("fake", term);
              TermToBytesRefAttribute termAtt = ts.addAttribute(TermToBytesRefAttribute.class);
              BytesRef bytes = termAtt.getBytesRef();
              ts.reset();
              assertTrue(ts.incrementToken());
              termAtt.fillBytesRef();
              assertEquals(expected, bytes);
            }
          } catch (IOException e) {
            throw new RuntimeException(e);
          }

Examples of org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute.fillBytesRef()

      // reset the TokenStream to the first token
      stream.reset();

      TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class);
      while(stream.incrementToken()) {
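        // the term bytes are filled but not inspected here; this loop only counts tokens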
        termAtt.fillBytesRef();
        tokenCount++;
      }
    }
    totalTokenCount += tokenCount;
    return tokenCount;