Examples of DirectoryReader


Examples of org.apache.lucene.index.DirectoryReader

    doc.add(new StoredField("bogusterms", "bogus"));
    doc.add(new StoredField("bogustermsindex", "bogus"));
    doc.add(new StoredField("bogusmultivalued", "bogus"));
    doc.add(new StoredField("bogusbits", "bogus"));
    iw.addDocument(doc);
    DirectoryReader ir = iw.getReader();
    iw.close();
   
    AtomicReader ar = getOnlySegmentReader(ir);
   
    final FieldCache cache = FieldCache.DEFAULT;
    cache.purgeAllCaches();
    assertEquals(0, cache.getCacheEntries().length);
   
    Bytes bytes = cache.getBytes(ar, "bogusbytes", true);
    assertEquals(0, bytes.get(0));

    Shorts shorts = cache.getShorts(ar, "bogusshorts", true);
    assertEquals(0, shorts.get(0));
   
    Ints ints = cache.getInts(ar, "bogusints", true);
    assertEquals(0, ints.get(0));
   
    Longs longs = cache.getLongs(ar, "boguslongs", true);
    assertEquals(0, longs.get(0));
   
    Floats floats = cache.getFloats(ar, "bogusfloats", true);
    assertEquals(0, floats.get(0), 0.0f);
   
    Doubles doubles = cache.getDoubles(ar, "bogusdoubles", true);
    assertEquals(0, doubles.get(0), 0.0D);
   
    BytesRef scratch = new BytesRef();
    BinaryDocValues binaries = cache.getTerms(ar, "bogusterms", true);
    binaries.get(0, scratch);
    assertEquals(0, scratch.length);
   
    SortedDocValues sorted = cache.getTermsIndex(ar, "bogustermsindex");
    assertEquals(-1, sorted.getOrd(0));
    sorted.get(0, scratch);
    assertEquals(0, scratch.length);
   
    SortedSetDocValues sortedSet = cache.getDocTermOrds(ar, "bogusmultivalued");
    sortedSet.setDocument(0);
    assertEquals(SortedSetDocValues.NO_MORE_ORDS, sortedSet.nextOrd());
   
    Bits bits = cache.getDocsWithField(ar, "bogusbits");
    assertFalse(bits.get(0));
   
    // check that we cached nothing
    assertEquals(0, cache.getCacheEntries().length);
    ir.close();
    dir.close();
  }
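
A note on the reader in this excerpt: it comes from the test writer's getReader(), which returns a near-real-time DirectoryReader. Outside the test framework the equivalent is DirectoryReader.open(IndexWriter, boolean). A minimal sketch, assuming the Lucene 4.x API; the directory, analyzer, and field names here are illustrative:

    Directory dir = new RAMDirectory();
    IndexWriter iw = new IndexWriter(dir,
        new IndexWriterConfig(Version.LUCENE_46, new StandardAnalyzer(Version.LUCENE_46)));
    Document doc = new Document();
    doc.add(new StringField("id", "1", Field.Store.YES));
    iw.addDocument(doc);
    // Near-real-time reader: sees the document added above even before commit();
    // 'true' means pending deletes are applied when the reader is opened.
    DirectoryReader reader = DirectoryReader.open(iw, true);
    // ... search via new IndexSearcher(reader) ...
    reader.close();
    iw.close();
    dir.close();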

Examples of org.apache.lucene.index.DirectoryReader

        field.setLongValue(v);
        iw.addDocument(doc);
      }
    }
    iw.forceMerge(1);
    final DirectoryReader reader = iw.getReader();
    final FieldCache.Longs longs = FieldCache.DEFAULT.getLongs(getOnlySegmentReader(reader), "f", false);
    for (int i = 0; i < values.length; ++i) {
      assertEquals(values[i], longs.get(i));
    }
    reader.close();
    iw.close();
    dir.close();
  }

Examples of org.apache.lucene.index.DirectoryReader

        field.setIntValue(v);
        iw.addDocument(doc);
      }
    }
    iw.forceMerge(1);
    final DirectoryReader reader = iw.getReader();
    final FieldCache.Ints ints = FieldCache.DEFAULT.getInts(getOnlySegmentReader(reader), "f", false);
    for (int i = 0; i < values.length; ++i) {
      assertEquals(values[i], ints.get(i));
    }
    reader.close();
    iw.close();
    dir.close();
  }
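
In this excerpt (and the previous one) getOnlySegmentReader(reader) is a test-framework helper; because the writer called forceMerge(1), the DirectoryReader holds exactly one segment, so the same AtomicReader is reachable through the public leaves() API. A minimal sketch, assuming the Lucene 4.x FieldCache signatures and the same field name "f":

    // After forceMerge(1) there is a single leaf; take its AtomicReader directly.
    AtomicReader onlyLeaf = reader.leaves().get(0).reader();
    FieldCache.Ints ints = FieldCache.DEFAULT.getInts(onlyLeaf, "f", false);
    long sum = 0;
    for (int i = 0; i < onlyLeaf.maxDoc(); i++) {
      sum += ints.get(i);   // per-document int value served from the field cache
    }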

Examples of org.apache.lucene.index.DirectoryReader

    // NOTE: cannot use writer.getReader because RIW (on
    // flipping a coin) may give us a newly opened reader,
    // but we use .reopen on this reader below and expect to
    // (must) get an NRT reader:
    DirectoryReader reader = DirectoryReader.open(writer.w, true);
    // same reason we don't wrap?
    IndexSearcher searcher = newSearcher(reader, false);

    // add a doc, refresh the reader, and check that it's there
    Document doc = new Document();
    doc.add(newStringField("id", "1", Field.Store.YES));
    writer.addDocument(doc);

    reader = refreshReader(reader);
    searcher = newSearcher(reader, false);

    TopDocs docs = searcher.search(new MatchAllDocsQuery(), 1);
    assertEquals("Should find a hit...", 1, docs.totalHits);

    final Filter startFilter = new QueryWrapperFilter(new TermQuery(new Term("id", "1")));

    CachingWrapperFilter filter = new CachingWrapperFilter(startFilter);

    docs = searcher.search(new MatchAllDocsQuery(), filter, 1);
    assertTrue(filter.sizeInBytes() > 0);

    assertEquals("[query + filter] Should find a hit...", 1, docs.totalHits);

    Query constantScore = new ConstantScoreQuery(filter);
    docs = searcher.search(constantScore, 1);
    assertEquals("[just filter] Should find a hit...", 1, docs.totalHits);

    // make sure we get a cache hit when we reopen reader
    // that had no change to deletions

    // fake delete (deletes nothing):
    writer.deleteDocuments(new Term("foo", "bar"));

    IndexReader oldReader = reader;
    reader = refreshReader(reader);
    assertTrue(reader == oldReader);
    int missCount = filter.missCount;
    docs = searcher.search(constantScore, 1);
    assertEquals("[just filter] Should find a hit...", 1, docs.totalHits);

    // cache hit:
    assertEquals(missCount, filter.missCount);

    // now delete the doc, refresh the reader, and see that it's not there
    writer.deleteDocuments(new Term("id", "1"));

    // NOTE: important to hold ref here so GC doesn't clear
    // the cache entry!  Else the assert below may sometimes
    // fail:
    oldReader = reader;
    reader = refreshReader(reader);

    searcher = newSearcher(reader, false);

    missCount = filter.missCount;
    docs = searcher.search(new MatchAllDocsQuery(), filter, 1);
    assertEquals("[query + filter] Should *not* find a hit...", 0, docs.totalHits);

    // cache hit
    assertEquals(missCount, filter.missCount);
    docs = searcher.search(constantScore, 1);
    assertEquals("[just filter] Should *not* find a hit...", 0, docs.totalHits);

    // apply deletes dynamically:
    filter = new CachingWrapperFilter(startFilter);
    writer.addDocument(doc);
    reader = refreshReader(reader);
    searcher = newSearcher(reader, false);

    docs = searcher.search(new MatchAllDocsQuery(), filter, 1);
    assertEquals("[query + filter] Should find a hit...", 1, docs.totalHits);
    missCount = filter.missCount;
    assertTrue(missCount > 0);
    constantScore = new ConstantScoreQuery(filter);
    docs = searcher.search(constantScore, 1);
    assertEquals("[just filter] Should find a hit...", 1, docs.totalHits);
    assertEquals(missCount, filter.missCount);

    writer.addDocument(doc);

    // NOTE: important to hold ref here so GC doesn't clear
    // the cache entry!  Else the assert below may sometimes
    // fail:
    oldReader = reader;

    reader = refreshReader(reader);
    searcher = newSearcher(reader, false);
       
    docs = searcher.search(new MatchAllDocsQuery(), filter, 1);
    assertEquals("[query + filter] Should find 2 hits...", 2, docs.totalHits);
    assertTrue(filter.missCount > missCount);
    missCount = filter.missCount;

    constantScore = new ConstantScoreQuery(filter);
    docs = searcher.search(constantScore, 1);
    assertEquals("[just filter] Should find a hit...", 2, docs.totalHits);
    assertEquals(missCount, filter.missCount);

    // now delete the doc, refresh the reader, and see that it's not there
    writer.deleteDocuments(new Term("id", "1"));

    reader = refreshReader(reader);
    searcher = newSearcher(reader, false);

    docs = searcher.search(new MatchAllDocsQuery(), filter, 1);
    assertEquals("[query + filter] Should *not* find a hit...", 0, docs.totalHits);
    // CWF reused the same entry (it dynamically applied the deletes):
    assertEquals(missCount, filter.missCount);

    docs = searcher.search(constantScore, 1);
    assertEquals("[just filter] Should *not* find a hit...", 0, docs.totalHits);
    // CWF reused the same entry (it dynamically applied the deletes):
    assertEquals(missCount, filter.missCount);

    // NOTE: silliness to make sure JRE does not eliminate
    // our holding onto oldReader to prevent
    // CachingWrapperFilter's WeakHashMap from dropping the
    // entry:
    assertTrue(oldReader != null);

    reader.close();
    writer.close();
    dir.close();
  }

Examples of org.apache.lucene.index.DirectoryReader

    writer.close();
    dir.close();
  }

  private static DirectoryReader refreshReader(DirectoryReader reader) throws IOException {
    DirectoryReader oldReader = reader;
    reader = DirectoryReader.openIfChanged(reader);
    if (reader != null) {
      oldReader.close();
      return reader;
    } else {
      return oldReader;
    }
  }
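
The refreshReader helper above is the standard openIfChanged pattern: reopen only when the index has changed, close the stale reader, and otherwise keep using the old one. Lucene's SearcherManager packages the same pattern together with searcher ref-counting; a minimal sketch, assuming the Lucene 4.x API, where writer stands for an IndexWriter:

    SearcherManager mgr = new SearcherManager(writer, true, new SearcherFactory());
    // ... index more documents with the writer ...
    mgr.maybeRefresh();                // internally reopens via DirectoryReader.openIfChanged
    IndexSearcher s = mgr.acquire();   // ref-counted searcher over the current reader
    try {
      // ... run queries with s ...
    } finally {
      mgr.release(s);
    }
    mgr.close();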

Examples of org.apache.lucene.index.DirectoryReader

        } else {
          w.addDocument(doc);
        }
      }

      final DirectoryReader r = w.getReader();
      w.close();

      // NOTE: intentional but temporary field cache insanity!
      final FieldCache.Ints docIdToFieldId = FieldCache.DEFAULT.getInts(SlowCompositeReaderWrapper.wrap(r), "id", false);
      final int[] fieldIdToDocID = new int[numDocs];
      for (int i = 0; i < numDocs; i++) {
        int fieldId = docIdToFieldId.get(i);
        fieldIdToDocID[fieldId] = i;
      }

      try {
        final IndexSearcher s = newSearcher(r);
        if (SlowCompositeReaderWrapper.class.isAssignableFrom(s.getIndexReader().getClass())) {
          canUseIDV = false;
        } else {
          canUseIDV = !preFlex;
        }

        for (int contentID = 0; contentID < 3; contentID++) {
          final ScoreDoc[] hits = s.search(new TermQuery(new Term("content", "real" + contentID)), numDocs).scoreDocs;
          for (ScoreDoc hit : hits) {
            final GroupDoc gd = groupDocs[docIdToFieldId.get(hit.doc)];
            assertTrue(gd.score == 0.0);
            gd.score = hit.score;
            int docId = gd.id;
            assertEquals(docId, docIdToFieldId.get(hit.doc));
          }
        }

        for (GroupDoc gd : groupDocs) {
          assertTrue(gd.score != 0.0);
        }

        for (int searchIter = 0; searchIter < 100; searchIter++) {

          if (VERBOSE) {
            System.out.println("TEST: searchIter=" + searchIter);
          }

          final String searchTerm = "real" + random().nextInt(3);
          boolean sortByScoreOnly = random().nextBoolean();
          Sort sortWithinGroup = getRandomSort(sortByScoreOnly);
          AbstractAllGroupHeadsCollector<?> allGroupHeadsCollector = createRandomCollector("group", sortWithinGroup, canUseIDV, valueType);
          s.search(new TermQuery(new Term("content", searchTerm)), allGroupHeadsCollector);
          int[] expectedGroupHeads = createExpectedGroupHeads(searchTerm, groupDocs, sortWithinGroup, sortByScoreOnly, fieldIdToDocID);
          int[] actualGroupHeads = allGroupHeadsCollector.retrieveGroupHeads();
          // The actual group heads contain Lucene doc ids; convert them to our id values.
          for (int i = 0; i < actualGroupHeads.length; i++) {
            actualGroupHeads[i] = docIdToFieldId.get(actualGroupHeads[i]);
          }
          // Allows us to easily iterate over and assert the actual and expected results.
          Arrays.sort(expectedGroupHeads);
          Arrays.sort(actualGroupHeads);

          if (VERBOSE) {
            System.out.println("Collector: " + allGroupHeadsCollector.getClass().getSimpleName());
            System.out.println("Sort within group: " + sortWithinGroup);
            System.out.println("Num group: " + numGroups);
            System.out.println("Num doc: " + numDocs);
            System.out.println("\n=== Expected: \n");
            for (int expectedDocId : expectedGroupHeads) {
              GroupDoc expectedGroupDoc = groupDocs[expectedDocId];
              String expectedGroup = expectedGroupDoc.group == null ? null : expectedGroupDoc.group.utf8ToString();
              System.out.println(
                  String.format(Locale.ROOT,
                      "Group:%10s score%5f Sort1:%10s Sort2:%10s Sort3:%10s doc:%5d",
                      expectedGroup, expectedGroupDoc.score, expectedGroupDoc.sort1.utf8ToString(),
                      expectedGroupDoc.sort2.utf8ToString(), expectedGroupDoc.sort3.utf8ToString(), expectedDocId
                  )
              );
            }
            System.out.println("\n=== Actual: \n");
            for (int actualDocId : actualGroupHeads) {
              GroupDoc actualGroupDoc = groupDocs[actualDocId];
              String actualGroup = actualGroupDoc.group == null ? null : actualGroupDoc.group.utf8ToString();
              System.out.println(
                  String.format(Locale.ROOT,
                      "Group:%10s score%5f Sort1:%10s Sort2:%10s Sort3:%10s doc:%5d",
                      actualGroup, actualGroupDoc.score, actualGroupDoc.sort1.utf8ToString(),
                      actualGroupDoc.sort2.utf8ToString(), actualGroupDoc.sort3.utf8ToString(), actualDocId
                  )
              );
            }
            System.out.println("\n===================================================================================");
          }

          assertEquals(expectedGroupHeads.length, actualGroupHeads.length);
          for (int i = 0; i < expectedGroupHeads.length; i++) {
            assertEquals(expectedGroupHeads[i], actualGroupHeads[i]);
          }
        }
      } finally {
        QueryUtils.purgeFieldCache(r);
      }

      r.close();
      dir.close();
    }
  }

Examples of org.apache.lucene.index.DirectoryReader

    config.setMergePolicy(mp);

   
    populate(directory, config);

    DirectoryReader r0 = DirectoryReader.open(directory);
    SegmentReader r = LuceneTestCase.getOnlySegmentReader(r0);
    String segment = r.getSegmentName();
    r.close();

    FieldInfosReader infosReader = new PreFlexRWCodec().fieldInfosFormat().getFieldInfosReader();

Examples of org.apache.lucene.index.DirectoryReader

        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp));
    int numdocs = atLeast(20);
    createRandomIndex(numdocs, writer, random());
    writer.commit();

    DirectoryReader open = DirectoryReader.open(dir);
    for (AtomicReaderContext ctx : open.leaves()) {
      AtomicReader indexReader = ctx.reader();
      Terms terms = indexReader.terms("body");
      TermsEnum iterator = terms.iterator(null);
      IdentityHashMap<DocsEnum, Boolean> enums = new IdentityHashMap<DocsEnum, Boolean>();
      MatchNoBits bits = new Bits.MatchNoBits(indexReader.maxDoc());

Examples of org.apache.lucene.index.DirectoryReader

        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp));
    int numdocs = atLeast(20);
    createRandomIndex(numdocs, writer, random());
    writer.commit();

    DirectoryReader open = DirectoryReader.open(dir);
    for (AtomicReaderContext ctx : open.leaves()) {
      Terms terms = ctx.reader().terms("body");
      TermsEnum iterator = terms.iterator(null);
      IdentityHashMap<DocsEnum, Boolean> enums = new IdentityHashMap<DocsEnum, Boolean>();
      MatchNoBits bits = new Bits.MatchNoBits(open.maxDoc());
      DocsEnum docs = null;
      while ((iterator.next()) != null) {
        docs = iterator.docs(bits, docs, random().nextBoolean() ? DocsEnum.FLAG_FREQS : DocsEnum.FLAG_NONE);
        enums.put(docs, true);
      }
     
      assertEquals(1, enums.size());
      enums.clear();
      iterator = terms.iterator(null);
      docs = null;
      while ((iterator.next()) != null) {
        docs = iterator.docs(new Bits.MatchNoBits(open.maxDoc()), docs, random().nextBoolean() ? DocsEnum.FLAG_FREQS : DocsEnum.FLAG_NONE);
        enums.put(docs, true);
      }
      assertEquals(terms.size(), enums.size());
     
      enums.clear();
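
These leaf-by-leaf excerpts exercise the enum-reuse contract: passing the previous DocsEnum back into docs() lets the codec recycle the instance, which the IdentityHashMap above counts. A minimal sketch of the same idiom that also consumes the postings, assuming the Lucene 4.x API and the "body" field from the excerpt:

      DocsEnum de = null;
      BytesRef term;
      TermsEnum te = terms.iterator(null);
      while ((term = te.next()) != null) {
        // Reuse 'de' so the codec may hand back the same DocsEnum instance.
        de = te.docs(null, de, DocsEnum.FLAG_NONE);
        int doc;
        while ((doc = de.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) {
          // process (term, doc) here
        }
      }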

Examples of org.apache.lucene.index.DirectoryReader

        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())).setCodec(cp));
    int numdocs = atLeast(20);
    createRandomIndex(numdocs, writer, random());
    writer.commit();

    DirectoryReader firstReader = DirectoryReader.open(dir);
    DirectoryReader secondReader = DirectoryReader.open(dir);
    List<AtomicReaderContext> leaves = firstReader.leaves();
    List<AtomicReaderContext> leaves2 = secondReader.leaves();
   
    for (AtomicReaderContext ctx : leaves) {
      Terms terms = ctx.reader().terms("body");
      TermsEnum iterator = terms.iterator(null);
      IdentityHashMap<DocsEnum, Boolean> enums = new IdentityHashMap<DocsEnum, Boolean>();