Examples of reset()


Examples of org.apache.lucene.analysis.NumericTokenStream.reset()

        final NumericTermAttribute numTerm1 = ts1.addAttribute(NumericTermAttribute.class);
        final NumericTermAttribute numTerm2 = ts2.addAttribute(NumericTermAttribute.class);
        final PositionIncrementAttribute posInc1 = ts1.addAttribute(PositionIncrementAttribute.class);
        final PositionIncrementAttribute posInc2 = ts2.addAttribute(PositionIncrementAttribute.class);
        ts1.reset();
        ts2.reset();
        while (ts1.incrementToken()) {
            assertThat(ts2.incrementToken(), is(true));
            assertThat(posInc1, equalTo(posInc2));
            // can't use equalTo directly on the numeric attribute because it doesn't implement equals (LUCENE-5070)
            assertThat(numTerm1.getRawValue(), equalTo(numTerm2.getRawValue()));
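The test above walks two parallel streams and compares their attributes token by token. As a minimal, self-contained illustration of the same reset()-before-incrementToken() contract (a hedged sketch; Lucene 4.x API and an arbitrary value assumed), a single NumericTokenStream can be consumed like this:

    import java.io.IOException;
    import org.apache.lucene.analysis.NumericTokenStream;
    import org.apache.lucene.analysis.NumericTokenStream.NumericTermAttribute;

    // Illustrative sketch, not taken from the excerpted project.
    public class NumericResetSketch {
        public static void main(String[] args) throws IOException {
            // stream for a single long value (42 is an arbitrary example)
            NumericTokenStream ts = new NumericTokenStream().setLongValue(42L);
            NumericTermAttribute numTerm = ts.addAttribute(NumericTermAttribute.class);

            ts.reset();                    // mandatory before the first incrementToken()
            while (ts.incrementToken()) {
                // one token per precision step
                System.out.println(numTerm.getRawValue() + " (shift=" + numTerm.getShift() + ")");
            }
            ts.end();
            ts.close();
        }
    }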

Examples of org.apache.lucene.analysis.TeeSinkTokenFilter.reset()

   
    boolean seenDogs = false;

    TermAttribute termAtt = ttf.addAttribute(TermAttribute.class);
    TypeAttribute typeAtt = ttf.addAttribute(TypeAttribute.class);
    ttf.reset();
    while (ttf.incrementToken()) {
      if (termAtt.term().equals("dogs")) {
        seenDogs = true;
        assertTrue(typeAtt.type() + " is not equal to " + "D", typeAtt.type().equals("D") == true);
      } else {
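A hedged, self-contained sketch of the tee side (Lucene 3.x API and input text assumed): the TeeSinkTokenFilter is reset and consumed like any other TokenStream, caching tokens for any sinks created from it.

    import java.io.IOException;
    import java.io.StringReader;
    import org.apache.lucene.analysis.TeeSinkTokenFilter;
    import org.apache.lucene.analysis.WhitespaceTokenizer;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

    // Illustrative sketch, not taken from the excerpted test.
    public class TeeResetSketch {
        public static void main(String[] args) throws IOException {
            TeeSinkTokenFilter ttf = new TeeSinkTokenFilter(
                new WhitespaceTokenizer(new StringReader("the dogs went running")));
            CharTermAttribute termAtt = ttf.addAttribute(CharTermAttribute.class);

            ttf.reset();                   // resets the tee and the wrapped tokenizer
            while (ttf.incrementToken()) {
                System.out.println(termAtt.toString());
            }
            ttf.end();
            ttf.close();
        }
    }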

Examples of org.apache.lucene.analysis.TeeSinkTokenFilter.SinkTokenStream.reset()

      }
    }
    assertTrue(seenDogs + " does not equal: " + true, seenDogs == true);
   
    int sinkCount = 0;
    sink.reset();
    while (sink.incrementToken()) {
      sinkCount++;
    }

    assertTrue("sink Size: " + sinkCount + " is not: " + 1, sinkCount == 1);
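The matching sink side, as a hedged sketch (Lucene 3.x API assumed): once the tee has been consumed, the SinkTokenStream is reset() and replays the cached tokens.

    import java.io.IOException;
    import java.io.StringReader;
    import org.apache.lucene.analysis.TeeSinkTokenFilter;
    import org.apache.lucene.analysis.TeeSinkTokenFilter.SinkTokenStream;
    import org.apache.lucene.analysis.WhitespaceTokenizer;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

    // Illustrative sketch, not taken from the excerpted test.
    public class SinkResetSketch {
        public static void main(String[] args) throws IOException {
            TeeSinkTokenFilter tee = new TeeSinkTokenFilter(
                new WhitespaceTokenizer(new StringReader("one two three")));
            SinkTokenStream sink = tee.newSinkTokenStream();

            tee.reset();
            tee.consumeAllTokens();        // fills the sink's cache
            tee.end();
            tee.close();

            CharTermAttribute termAtt = sink.addAttribute(CharTermAttribute.class);
            int sinkCount = 0;
            sink.reset();                  // rewind over the cached tokens
            while (sink.incrementToken()) {
                sinkCount++;
                System.out.println(termAtt.toString());
            }
            sink.end();
            System.out.println("sink size: " + sinkCount);
        }
    }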

Examples of org.apache.lucene.analysis.TokenStream.reset()

          System.out.println("Tokens for '" + asString + "':");
          while (stream.incrementToken()) {
            System.out.println(" '" + termAtt.term() + "'");
          }
          stream.reset();
          return stream;
          // Do the call a second time and return the result this time
          // Old behaviour
          // return nestedAnalyzer.tokenStream(fieldName, new StringReader(asString));
        } catch (IOException exc) {
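For reference, the standard consumption contract for any TokenStream, shown as a minimal hedged sketch (analyzer, field name, and text are illustrative): reset() before the first incrementToken(), then end() and close().

    import java.io.IOException;
    import java.io.StringReader;
    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
    import org.apache.lucene.util.Version;

    // Illustrative sketch, not taken from the excerpted project.
    public class TokenStreamResetSketch {
        public static void main(String[] args) throws IOException {
            Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_36);
            TokenStream stream = analyzer.tokenStream("content",
                new StringReader("Tokens for this example text"));
            CharTermAttribute termAtt = stream.addAttribute(CharTermAttribute.class);

            stream.reset();                // required before incrementToken()
            while (stream.incrementToken()) {
                System.out.println(" '" + termAtt.toString() + "'");
            }
            stream.end();                  // records final offset state
            stream.close();
        }
    }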

Examples of org.apache.lucene.analysis.Tokenizer.reset()

    Tokenizer tokenizer = (Tokenizer) getPreviousTokenStream();
    if (tokenizer == null) {
      tokenizer = new SimpleTokenizer(reader);
      setPreviousTokenStream(tokenizer);
    } else {
      tokenizer.reset(reader);
    }
    return tokenizer;
  }
}
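The fragment above is the pre-4.0 reusable-tokenizer pattern: a cached Tokenizer is re-pointed at new input with reset(Reader) instead of being rebuilt (in Lucene 4.x this call became setReader(Reader)). A hedged sketch of that reuse, with WhitespaceTokenizer standing in for the custom SimpleTokenizer:

    import java.io.IOException;
    import java.io.StringReader;
    import org.apache.lucene.analysis.WhitespaceTokenizer;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

    // Illustrative sketch (Lucene 3.x API assumed), not taken from the excerpted project.
    public class TokenizerReuseSketch {
        public static void main(String[] args) throws IOException {
            WhitespaceTokenizer tokenizer =
                new WhitespaceTokenizer(new StringReader("first document"));
            CharTermAttribute term = tokenizer.addAttribute(CharTermAttribute.class);

            while (tokenizer.incrementToken()) {       // consume the first input
                System.out.println(term.toString());
            }

            tokenizer.reset(new StringReader("second document"));  // reuse: swap in new input
            while (tokenizer.incrementToken()) {       // consume the second input
                System.out.println(term.toString());
            }
            tokenizer.close();
        }
    }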

Examples of org.apache.lucene.analysis.WhitespaceTokenizer.reset()

      new String[]{"please","please divide","divide","divide this","this","this sentence","sentence"},
      new int[]{0,0,7,7,14,14,19}, new int[]{6,13,13,18,18,27,27},
      new String[]{TypeAttributeImpl.DEFAULT_TYPE,"shingle",TypeAttributeImpl.DEFAULT_TYPE,"shingle",TypeAttributeImpl.DEFAULT_TYPE,"shingle",TypeAttributeImpl.DEFAULT_TYPE},
      new int[]{1,0,1,0,1,0,1}
    );
    wsTokenizer.reset(new StringReader("please divide this sentence"));
    assertTokenStreamContents(filter,
      new String[]{"please","please divide","divide","divide this","this","this sentence","sentence"},
      new int[]{0,0,7,7,14,14,19}, new int[]{6,13,13,18,18,27,27},
      new String[]{TypeAttributeImpl.DEFAULT_TYPE,"shingle",TypeAttributeImpl.DEFAULT_TYPE,"shingle",TypeAttributeImpl.DEFAULT_TYPE,"shingle",TypeAttributeImpl.DEFAULT_TYPE},
      new int[]{1,0,1,0,1,0,1}
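A hedged, self-contained version of that test flow (Lucene 3.x API assumed): re-pointing the underlying WhitespaceTokenizer with reset(Reader) lets the same ShingleFilter chain be consumed again against fresh input.

    import java.io.IOException;
    import java.io.StringReader;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.WhitespaceTokenizer;
    import org.apache.lucene.analysis.shingle.ShingleFilter;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

    // Illustrative sketch, not taken from the excerpted test.
    public class ShingleReuseSketch {
        static void dump(TokenStream ts) throws IOException {
            CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
            ts.reset();                    // reset the whole chain before consuming
            while (ts.incrementToken()) {
                System.out.println(term.toString());
            }
            ts.end();
        }

        public static void main(String[] args) throws IOException {
            WhitespaceTokenizer wsTokenizer =
                new WhitespaceTokenizer(new StringReader("please divide this sentence"));
            TokenStream filter = new ShingleFilter(wsTokenizer);

            dump(filter);                                             // first pass
            wsTokenizer.reset(new StringReader("please divide this sentence"));
            dump(filter);                                             // second pass, same chain
            filter.close();
        }
    }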

Examples of org.apache.lucene.analysis.cn.smart.SentenceTokenizer.reset()

                + "JIRA issue about this.");
        }
        //first the sentences
        TokenStream sentences = new SentenceTokenizer(new CharSequenceReader(at.getText()));
        try {
          sentences.reset();
            while(sentences.incrementToken()){
                OffsetAttribute offset = sentences.addAttribute(OffsetAttribute.class);
                Sentence s = at.addSentence(offset.startOffset(), offset.endOffset());
                if(log.isTraceEnabled()) {
                    log.trace("detected {}:{}",s,s.getSpan());
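A hedged, self-contained sketch of the sentence pass (smartcn SentenceTokenizer; the sample text is made up): reset() the stream, then read each sentence's offsets.

    import java.io.IOException;
    import java.io.StringReader;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.cn.smart.SentenceTokenizer;
    import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;

    // Illustrative sketch, not taken from the excerpted project.
    public class SentenceResetSketch {
        public static void main(String[] args) throws IOException {
            TokenStream sentences = new SentenceTokenizer(new StringReader("我爱北京。今天天气很好。"));
            OffsetAttribute offset = sentences.addAttribute(OffsetAttribute.class);
            try {
                sentences.reset();         // must precede the first incrementToken()
                while (sentences.incrementToken()) {
                    System.out.println("sentence at [" + offset.startOffset()
                            + "," + offset.endOffset() + ")");
                }
                sentences.end();
            } finally {
                sentences.close();
            }
        }
    }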

Examples of org.apache.lucene.analysis.cn.smart.WordTokenFilter.reset()

            }
        }
        //now the tokens
        TokenStream tokens = new WordTokenFilter(new AnalyzedTextSentenceTokenizer(at));
        try {
          tokens.reset();
            while(tokens.incrementToken()){
                OffsetAttribute offset = tokens.addAttribute(OffsetAttribute.class);
                Token t = at.addToken(offset.startOffset(), offset.endOffset());
                log.trace("detected {}",t);
            }
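A hedged sketch of the word pass (smartcn WordTokenFilter over a SentenceTokenizer; the text is made up): reset() on the outermost stream resets the whole chain before it is consumed.

    import java.io.IOException;
    import java.io.StringReader;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.cn.smart.SentenceTokenizer;
    import org.apache.lucene.analysis.cn.smart.WordTokenFilter;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
    import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;

    // Illustrative sketch, not taken from the excerpted project.
    public class WordTokenResetSketch {
        public static void main(String[] args) throws IOException {
            TokenStream tokens = new WordTokenFilter(
                new SentenceTokenizer(new StringReader("我爱北京天安门")));
            CharTermAttribute term = tokens.addAttribute(CharTermAttribute.class);
            OffsetAttribute offset = tokens.addAttribute(OffsetAttribute.class);
            try {
                tokens.reset();            // resets the filter and the tokenizer beneath it
                while (tokens.incrementToken()) {
                    System.out.println(term.toString() + " [" + offset.startOffset()
                            + "," + offset.endOffset() + ")");
                }
                tokens.end();
            } finally {
                tokens.close();
            }
        }
    }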

Examples of org.apache.lucene.analysis.core.KeywordTokenizer.reset()

    final int maxGram = _TestUtil.nextInt(random(), minGram, 10);
    TokenStream tk = new KeywordTokenizer(new StringReader(s));
    tk = new NGramTokenFilter(TEST_VERSION_CURRENT, tk, minGram, maxGram);
    final CharTermAttribute termAtt = tk.addAttribute(CharTermAttribute.class);
    final OffsetAttribute offsetAtt = tk.addAttribute(OffsetAttribute.class);
    tk.reset();
    for (int start = 0; start < codePointCount; ++start) {
      for (int end = start + minGram; end <= Math.min(codePointCount, start + maxGram); ++end) {
        assertTrue(tk.incrementToken());
        assertEquals(0, offsetAtt.startOffset());
        assertEquals(s.length(), offsetAtt.endOffset());
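A hedged, self-contained version of that chain (Lucene 4.4+ API and fixed gram sizes assumed in place of the test's random ones): KeywordTokenizer emits the whole input as one token, and reset() must precede consumption of the n-gram filter.

    import java.io.IOException;
    import java.io.StringReader;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.core.KeywordTokenizer;
    import org.apache.lucene.analysis.ngram.NGramTokenFilter;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
    import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
    import org.apache.lucene.util.Version;

    // Illustrative sketch, not taken from the excerpted test.
    public class KeywordNGramSketch {
        public static void main(String[] args) throws IOException {
            TokenStream tk = new KeywordTokenizer(new StringReader("lucene"));
            tk = new NGramTokenFilter(Version.LUCENE_44, tk, 2, 3);   // bigrams and trigrams
            CharTermAttribute termAtt = tk.addAttribute(CharTermAttribute.class);
            OffsetAttribute offsetAtt = tk.addAttribute(OffsetAttribute.class);

            tk.reset();                    // mandatory before incrementToken()
            while (tk.incrementToken()) {
                System.out.println(termAtt.toString() + " [" + offsetAtt.startOffset()
                        + "," + offsetAtt.endOffset() + ")");
            }
            tk.end();
            tk.close();
        }
    }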

Examples of org.apache.lucene.analysis.core.LetterTokenizer.reset()

  public void testGraphs() throws IOException {
    TokenStream tk = new LetterTokenizer(TEST_VERSION_CURRENT, new StringReader("abc d efgh ij klmno p q"));
    tk = new ShingleFilter(tk);
    tk = new EdgeNGramTokenFilter(TEST_VERSION_CURRENT, tk, 7, 10);
    tk.reset();
    assertTokenStreamContents(tk,
        new String[] { "efgh ij", "ij klmn", "ij klmno", "klmno p" },
        new int[]    { 6,11,11,14 },
        new int[]    { 13,19,19,21 },
        new int[]    { 3,1,0,1 },
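A hedged, trimmed-down version of that chain (Lucene 4.x API assumed; the EdgeNGram step is omitted for brevity): a LetterTokenizer feeding a ShingleFilter, with reset() called on the outermost stream before it is consumed.

    import java.io.IOException;
    import java.io.StringReader;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.core.LetterTokenizer;
    import org.apache.lucene.analysis.shingle.ShingleFilter;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
    import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
    import org.apache.lucene.util.Version;

    // Illustrative sketch, not taken from the excerpted test.
    public class LetterShingleSketch {
        public static void main(String[] args) throws IOException {
            TokenStream tk = new LetterTokenizer(Version.LUCENE_44,
                new StringReader("abc d efgh ij klmno p q"));
            tk = new ShingleFilter(tk);
            CharTermAttribute termAtt = tk.addAttribute(CharTermAttribute.class);
            OffsetAttribute offsetAtt = tk.addAttribute(OffsetAttribute.class);

            tk.reset();                    // resets the shingle filter and the tokenizer
            while (tk.incrementToken()) {
                System.out.println(termAtt.toString() + " ["
                        + offsetAtt.startOffset() + "," + offsetAtt.endOffset() + ")");
            }
            tk.end();
            tk.close();
        }
    }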