Examples of reset()


Examples of org.apache.lucene.analysis.icu.segmentation.ICUTokenizer.reset()

        int lastAdded = -1;
        int lastOffset = 0;
        boolean foundWildcard = false;
        //Lucene tokenizers are really low level ...
        try {
            tokenizer.reset(); //starting with Solr 4, reset() MUST be called before use
            while (tokenizer.incrementToken()) {
                //only interested in the start/end offsets of tokens
                OffsetAttribute offset = tokenizer.addAttribute(OffsetAttribute.class);
                if (lastAdded < 0) { //start with this token
                    lastAdded = offset.startOffset();
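For context, a minimal, self-contained version of the reset()-then-consume lifecycle the snippet relies on. This is a sketch assuming a Lucene 4.x ICUTokenizer whose constructor takes a Reader; the sample text is illustrative:

    import java.io.IOException;
    import java.io.StringReader;
    import org.apache.lucene.analysis.icu.segmentation.ICUTokenizer;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
    import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;

    public class ICUTokenizerResetExample {
      public static void main(String[] args) throws IOException {
        ICUTokenizer tokenizer = new ICUTokenizer(new StringReader("Hello, world"));
        CharTermAttribute term = tokenizer.addAttribute(CharTermAttribute.class);
        OffsetAttribute offset = tokenizer.addAttribute(OffsetAttribute.class);
        try {
          tokenizer.reset(); // mandatory since Lucene 4, before the first incrementToken()
          while (tokenizer.incrementToken()) {
            System.out.println(term + " [" + offset.startOffset() + "," + offset.endOffset() + ")");
          }
          tokenizer.end(); // record the final offset state
        } finally {
          tokenizer.close();
        }
      }
    }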

Examples of org.apache.lucene.analysis.payloads.DelimitedPayloadTokenFilter.reset()

    ResourceLoader loader = new StringMockResourceLoader("solr/collection1");
    factory.inform(loader);

    TokenStream input = new MockTokenizer(new StringReader("the|0.1 quick|0.1 red|0.1"), MockTokenizer.WHITESPACE, false);
    DelimitedPayloadTokenFilter tf = factory.create(input);
    tf.reset();
    while (tf.incrementToken()){
      PayloadAttribute payAttr = tf.getAttribute(PayloadAttribute.class);
      assertTrue("payAttr is null and it shouldn't be", payAttr != null);
      byte[] payData = payAttr.getPayload().bytes;
      assertTrue("payData is null and it shouldn't be", payData != null);
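The same consume loop without the factory plumbing, as a hedged sketch: it assumes a Lucene 4.x WhitespaceTokenizer(Version, Reader) constructor and that PayloadAttribute.getPayload() returns a BytesRef; the input text mirrors the test above.

    import java.io.IOException;
    import java.io.StringReader;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.core.WhitespaceTokenizer;
    import org.apache.lucene.analysis.payloads.DelimitedPayloadTokenFilter;
    import org.apache.lucene.analysis.payloads.FloatEncoder;
    import org.apache.lucene.analysis.payloads.PayloadHelper;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
    import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
    import org.apache.lucene.util.BytesRef;
    import org.apache.lucene.util.Version;

    public class DelimitedPayloadResetExample {
      public static void main(String[] args) throws IOException {
        TokenStream input = new WhitespaceTokenizer(Version.LUCENE_47,
            new StringReader("the|0.1 quick|0.1 red|0.1"));
        TokenStream tf = new DelimitedPayloadTokenFilter(input, '|', new FloatEncoder());
        CharTermAttribute term = tf.addAttribute(CharTermAttribute.class);
        PayloadAttribute payAttr = tf.addAttribute(PayloadAttribute.class);
        tf.reset(); // resets the whole chain, tokenizer included
        while (tf.incrementToken()) {
          BytesRef payload = payAttr.getPayload();
          float weight = payload == null ? 0f
              : PayloadHelper.decodeFloat(payload.bytes, payload.offset);
          System.out.println(term + " -> " + weight);
        }
        tf.end();
        tf.close();
      }
    }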

Examples of org.apache.lucene.analysis.shingle.ShingleFilter.reset()

    Vector vector = new RandomAccessSparseVector(dimension, value.length()); // guess at initial size

    if (maxNGramSize >= 2) {
      ShingleFilter sf = new ShingleFilter(new IteratorTokenStream(value.getEntries().iterator()), maxNGramSize);
      sf.reset();
      try {
        do {
          String term = sf.getAttribute(CharTermAttribute.class).toString();
          if (!term.isEmpty() && dictionary.containsKey(term)) { // ngram
            int termId = dictionary.get(term);
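Note that the snippet's do/while reads the term attribute before the first incrementToken(); the conventional loop checks incrementToken() first, as in this standalone sketch (shingle sizes and input text are illustrative, assuming Lucene 4.x constructors):

    import java.io.IOException;
    import java.io.StringReader;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.core.WhitespaceTokenizer;
    import org.apache.lucene.analysis.shingle.ShingleFilter;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
    import org.apache.lucene.util.Version;

    public class ShingleResetExample {
      public static void main(String[] args) throws IOException {
        TokenStream base = new WhitespaceTokenizer(Version.LUCENE_47,
            new StringReader("please divide this sentence into shingles"));
        ShingleFilter sf = new ShingleFilter(base, 2, 3); // bigrams and trigrams
        CharTermAttribute term = sf.addAttribute(CharTermAttribute.class);
        sf.reset(); // rewind the whole chain before consuming
        while (sf.incrementToken()) {
          System.out.println(term.toString()); // unigrams plus "please divide", ...
        }
        sf.end();
        sf.close();
      }
    }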

Examples of org.apache.lucene.analysis.standard.StandardTokenizer.reset()

    StandardTokenizer st = new StandardTokenizer(Version.LUCENE_30, new StringReader(ContentUtils.stripDiacritics(term)));
    CharTermAttribute termAtt = st.addAttribute(CharTermAttribute.class);
    StringBuffer sb = new StringBuffer();
    try {
      try {
        st.reset();
        while (st.incrementToken()) {
          if (sb.length() > 0) {
            sb.append(" +");
          }
          else {
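The complete loop the snippet is building, as a sketch (Version.LUCENE_30 matches the snippet; the input string is illustrative):

    import java.io.IOException;
    import java.io.StringReader;
    import org.apache.lucene.analysis.standard.StandardTokenizer;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
    import org.apache.lucene.util.Version;

    public class StandardTokenizerResetExample {
      public static void main(String[] args) throws IOException {
        StandardTokenizer st = new StandardTokenizer(Version.LUCENE_30,
            new StringReader("some stripped term"));
        CharTermAttribute termAtt = st.addAttribute(CharTermAttribute.class);
        StringBuilder sb = new StringBuilder();
        try {
          st.reset(); // always reset before the first incrementToken()
          while (st.incrementToken()) {
            if (sb.length() > 0) {
              sb.append(" +"); // join tokens into a MUST-clause query, as above
            }
            sb.append(termAtt);
          }
          st.end();
        } finally {
          st.close();
        }
        System.out.println(sb); // "some +stripped +term"
      }
    }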

Examples of org.apache.lucene.analysis.wikipedia.WikipediaTokenizer.reset()

    // now check the flags, TODO: add way to check flags from BaseTokenStreamTestCase?
    tf = new WikipediaTokenizer(new StringReader(test), WikipediaTokenizer.BOTH, untoks);
    int[] expectedFlags = new int[] { UNTOKENIZED_TOKEN_FLAG, 0, 0, 0, 0, UNTOKENIZED_TOKEN_FLAG, 0, 0, 0, 0,
        0, 0, 0, UNTOKENIZED_TOKEN_FLAG, 0, 0, 0, UNTOKENIZED_TOKEN_FLAG, 0, 0, UNTOKENIZED_TOKEN_FLAG, 0, 0, 0 };
    FlagsAttribute flagsAtt = tf.addAttribute(FlagsAttribute.class);
    tf.reset();
    for (int i = 0; i < expectedFlags.length; i++) {
      assertTrue(tf.incrementToken());
      assertEquals("flags " + i, expectedFlags[i], flagsAtt.getFlags());
    }
    assertFalse(tf.incrementToken());
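Outside the test harness the tokenizer follows the same lifecycle; a minimal sketch (the wiki markup input is illustrative):

    import java.io.IOException;
    import java.io.StringReader;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
    import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
    import org.apache.lucene.analysis.wikipedia.WikipediaTokenizer;

    public class WikipediaTokenizerResetExample {
      public static void main(String[] args) throws IOException {
        WikipediaTokenizer tf = new WikipediaTokenizer(
            new StringReader("[[Main Page]] links to '''bold''' text"));
        CharTermAttribute term = tf.addAttribute(CharTermAttribute.class);
        TypeAttribute type = tf.addAttribute(TypeAttribute.class);
        tf.reset(); // mandatory before the first incrementToken()
        while (tf.incrementToken()) {
          System.out.println(term + " / " + type.type()); // e.g. "Main / il"
        }
        tf.end();
        tf.close();
      }
    }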

Examples of org.apache.lucene.benchmark.stats.TimeData.reset()

                cnt++;
                if (cnt % logStep == 0)
                {
                    System.err.println(" - processed " + cnt + ", run id=" + trd.getId());
                    trd.addData(td);
                    td.reset();
                }
            }
        }
        trd.addData(td);
    }
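The accumulate-and-reset pattern in isolation. This is a hedged sketch: TimeData.start()/stop()/reset() and TestRunData.addData()/getId() are assumed from Lucene's old contrib/benchmark sources, and runOnce() is a hypothetical unit of timed work:

    static void timeInBatches(TestRunData trd, int iterations, int logStep) {
      TimeData td = new TimeData();
      int cnt = 0;
      for (int i = 0; i < iterations; i++) {
        td.start();
        runOnce(); // hypothetical work being measured
        td.stop();
        cnt++;
        if (cnt % logStep == 0) {
          System.err.println(" - processed " + cnt + ", run id=" + trd.getId());
          trd.addData(td); // fold this batch into the run's statistics
          td.reset();      // zero the counters so the next batch starts fresh
        }
      }
      trd.addData(td); // don't lose the final partial batch
    }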

Examples of org.apache.lucene.facet.index.CategoryListPayloadStream.reset()

    assertEquals("Wrong value in byte stream", 100000000, decoder.decode());
    assertEquals("Wrong value in byte stream", 1000000000, decoder.decode());
    assertEquals("Wrong value in byte stream", Integer.MAX_VALUE, decoder.decode());
    assertEquals("End of stream not reached", IntDecoder.EOS, decoder.decode());

    clps.reset();
    decoder.reInit(bais);
    assertEquals("End of stream not reached", IntDecoder.EOS, decoder.decode());
  }

}
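What reset() buys here, in isolation. A heavily hedged sketch against the long-removed facet API: appendIntToStream(), convertStreamToByteArray(), and the VInt8 encoder/decoder pair are assumed from that era's sources:

    CategoryListPayloadStream clps =
        new CategoryListPayloadStream(new VInt8IntEncoder());
    clps.appendIntToStream(42); // buffer one encoded value ...
    clps.reset();               // ... then discard everything buffered so far

    IntDecoder decoder = new VInt8IntDecoder();
    decoder.reInit(new ByteArrayInputStream(clps.convertStreamToByteArray()));
    // the stream is empty again, so the very first decode() returns EOS
    System.out.println(decoder.decode() == IntDecoder.EOS); // true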

Examples of org.apache.lucene.facet.index.streaming.CategoryAttributesStream.reset()

      nTokens++;
    }
    assertEquals("Wrong number of tokens", 3, nTokens);

    // test reset
    stream.reset();
    nTokens = 0;
    while (stream.incrementToken()) {
      nTokens++;
    }
    assertEquals("Wrong number of tokens", 3, nTokens);
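The rewind behavior under test, sketched end-to-end; CategoryAttributesStream and its construction over a collection of CategoryAttribute come from the removed Lucene 4.0 facet indexing API, so treat the names as assumptions:

    // categories: a previously built Iterable<CategoryAttribute>
    CategoryAttributesStream stream = new CategoryAttributesStream(categories);
    int firstPass = 0;
    while (stream.incrementToken()) {
      firstPass++;
    }
    stream.reset(); // rewind to the first category
    int secondPass = 0;
    while (stream.incrementToken()) {
      secondPass++;
    }
    // both passes should see every category exactly once
    assertEquals("Wrong number of tokens", firstPass, secondPass);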

Examples of org.apache.lucene.store.ByteArrayDataInput.reset()

      AtomicReader reader = context.reader();
      BytesRef scratch = new BytesRef(bytes);
      BinaryDocValues dv = reader.getBinaryDocValues("dv");
      for (int i = 0; i < reader.maxDoc(); i++) {
        dv.get(i, scratch);
        input.reset(scratch.bytes, scratch.offset, scratch.length);
        assertEquals(expectedValue % 65535, input.readVInt());
        assertTrue(input.eof());
        expectedValue++;
      }
    }
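reset() is what makes a single ByteArrayDataInput reusable across documents, as the loop above does per doc value. A minimal round trip (buffer size and value are illustrative):

    import java.io.IOException;
    import org.apache.lucene.store.ByteArrayDataInput;
    import org.apache.lucene.store.ByteArrayDataOutput;

    static void roundTrip() throws IOException {
      byte[] buffer = new byte[16];
      ByteArrayDataOutput out = new ByteArrayDataOutput(buffer);
      out.writeVInt(1234);

      ByteArrayDataInput input = new ByteArrayDataInput();
      input.reset(buffer, 0, out.getPosition()); // repoint the reader, no allocation
      assert input.readVInt() == 1234;
      assert input.eof(); // exactly the written bytes were consumed
    }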

Examples of org.apache.lucene.store.ByteArrayDataOutput.reset()

    BytesRef data = new BytesRef(bytes);
    BinaryDocValuesField dvField = new BinaryDocValuesField("dv", data);
    doc.add(dvField);
   
    for (int i = 0; i < Integer.MAX_VALUE; i++) {
      encoder.reset(bytes);
      encoder.writeVInt(i % 65535); // 1, 2, or 3 bytes
      data.length = encoder.getPosition();
      w.addDocument(doc);
      if (i % 100000 == 0) {
        System.out.println("indexed: " + i);
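The reuse idiom the loop depends on, isolated (buffer size and values are illustrative):

    static void reuseBuffer() throws IOException {
      byte[] bytes = new byte[5]; // a vInt never needs more than 5 bytes
      ByteArrayDataOutput encoder = new ByteArrayDataOutput();
      for (int i = 0; i < 3; i++) {
        encoder.reset(bytes);         // rewind to position 0 of the same buffer
        encoder.writeVInt(i % 65535); // overwrite the previous iteration's bytes
        int len = encoder.getPosition();
        // bytes[0..len) now holds this iteration's encoding
      }
    }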