Examples of MockRAMDirectory
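
MockRAMDirectory is the in-memory test Directory that ships with Lucene's own test sources. It extends RAMDirectory and adds test-time checks, such as verifying on close() that no files were left open and optionally simulating I/O failures, which is why every example below closes its readers, writers, and directories explicitly. The following is a minimal sketch of the common pattern the snippets share, assuming the pre-4.0 API used throughout this page (the deprecated no-argument WhitespaceAnalyzer constructor, IndexWriter.MaxFieldLength, and the near-real-time writer.getReader()); it is illustrative only, not taken from any of the examples.

import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockRAMDirectory;

public class MockRAMDirectoryBasics {
  public static void main(String[] args) throws Exception {
    // MockRAMDirectory behaves like RAMDirectory but fails loudly on
    // resource leaks, which makes it well suited to unit tests.
    Directory dir = new MockRAMDirectory();

    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(),
                                         IndexWriter.MaxFieldLength.LIMITED);
    Document doc = new Document();
    doc.add(new Field("indexname", "test", Field.Store.YES, Field.Index.NOT_ANALYZED));
    writer.addDocument(doc);

    // Near-real-time reader: sees the buffered document without a commit.
    IndexReader reader = writer.getReader();
    IndexSearcher searcher = new IndexSearcher(reader);
    int hits = searcher.search(new TermQuery(new Term("indexname", "test")), 10).totalHits;
    System.out.println("hits = " + hits); // expected: 1

    searcher.close();
    reader.close();
    writer.close();
    dir.close(); // MockRAMDirectory checks here that nothing was left open
  }
}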


Examples of org.apache.lucene.store.MockRAMDirectory

  /**
   * Tests creating a segment, then checks to ensure the segment can be seen
   * via IW.getReader.
   */
  public void doTestIndexWriterReopenSegment(boolean optimize) throws Exception {
    Directory dir1 = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(),
        IndexWriter.MaxFieldLength.LIMITED);
    writer.setInfoStream(infoStream);
    IndexReader r1 = writer.getReader();
    assertEquals(0, r1.maxDoc());
    createIndexNoClose(false, "index1", writer);
    writer.flush(!optimize, true, true);

    IndexReader iwr1 = writer.getReader();
    assertEquals(100, iwr1.maxDoc());

    IndexReader r2 = writer.getReader();
    assertEquals(100, r2.maxDoc());
    // add 100 documents
    for (int x = 10000; x < 10000 + 100; x++) {
      Document d = createDocument(x, "index1", 5);
      writer.addDocument(d);
    }
    writer.flush(false, true, true);
    // verify the reader was reopened internally
    IndexReader iwr2 = writer.getReader();
    assertTrue(iwr2 != r1);
    assertEquals(200, iwr2.maxDoc());
    // should have flushed out a segment
    IndexReader r3 = writer.getReader();
    assertTrue(r2 != r3);
    assertEquals(200, r3.maxDoc());

    // close the readers obtained from the writer; closing an NRT reader only
    // releases its reference and does not commit or close the writer
    r1.close();
    iwr1.close();
    r2.close();
    r3.close();
    iwr2.close();
    writer.close();

    // test whether the changes made it to the directory
    writer = new IndexWriter(dir1, new WhitespaceAnalyzer(),
        IndexWriter.MaxFieldLength.LIMITED);
    IndexReader w2r1 = writer.getReader();
    // ensure the added documents were actually flushed to the directory
    assertEquals(200, w2r1.maxDoc());
    w2r1.close();
    writer.close();

    dir1.close();
  }

Examples of org.apache.lucene.store.MockRAMDirectory


  public void testMergeWarmer() throws Exception {

    Directory dir1 = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(),
                                         IndexWriter.MaxFieldLength.LIMITED);
    writer.setInfoStream(infoStream);

    // create the index
    createIndexNoClose(false, "test", writer);

    // get a reader to put writer into near real-time mode
    IndexReader r1 = writer.getReader();

    // Enroll warmer
    MyWarmer warmer = new MyWarmer();
    writer.setMergedSegmentWarmer(warmer);
    writer.setMergeFactor(2);
    writer.setMaxBufferedDocs(2);

    for (int i = 0; i < 100; i++) {
      writer.addDocument(createDocument(i, "test", 4));
    }
    ((ConcurrentMergeScheduler) writer.getMergeScheduler()).sync();

    assertTrue(warmer.warmCount > 0);
    final int count = warmer.warmCount;

    writer.addDocument(createDocument(17, "test", 4));
    writer.optimize();
    assertTrue(warmer.warmCount > count);
   
    writer.close();
    r1.close();
    dir1.close();
  }

Examples of org.apache.lucene.store.MockRAMDirectory


  public void testAfterCommit() throws Exception {
    Directory dir1 = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(),
                                         IndexWriter.MaxFieldLength.LIMITED);
    writer.setInfoStream(infoStream);

    // create the index
    createIndexNoClose(false, "test", writer);

    // get a reader to put writer into near real-time mode
    IndexReader r1 = writer.getReader();
    _TestUtil.checkIndex(dir1);
    writer.commit();
    _TestUtil.checkIndex(dir1);
    assertEquals(100, r1.numDocs());

    for (int i = 0; i < 10; i++) {
      writer.addDocument(createDocument(i, "test", 4));
    }
    ((ConcurrentMergeScheduler) writer.getMergeScheduler()).sync();

    IndexReader r2 = r1.reopen();
    if (r2 != r1) {
      r1.close();
      r1 = r2;
    }
    assertEquals(110, r1.numDocs());
    writer.close();
    r1.close();
    dir1.close();
  }

Examples of org.apache.lucene.store.MockRAMDirectory


  // Make sure reader remains usable even if IndexWriter closes
  public void testAfterClose() throws Exception {
    Directory dir1 = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(),
                                         IndexWriter.MaxFieldLength.LIMITED);
    writer.setInfoStream(infoStream);

    // create the index
    createIndexNoClose(false, "test", writer);

    IndexReader r = writer.getReader();
    writer.close();

    _TestUtil.checkIndex(dir1);

    // reader should remain usable even after IndexWriter is closed:
    assertEquals(100, r.numDocs());
    Query q = new TermQuery(new Term("indexname", "test"));
    assertEquals(100, new IndexSearcher(r).search(q, 10).totalHits);

    try {
      r.reopen();
      fail("failed to hit AlreadyClosedException");
    } catch (AlreadyClosedException ace) {
      // expected
    }
    r.close();
    dir1.close();
  }

Examples of org.apache.lucene.store.MockRAMDirectory


  // Stress test reopen during addIndexes
  public void testDuringAddIndexes() throws Exception {
    Directory dir1 = new MockRAMDirectory();
    final IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(),
                                               IndexWriter.MaxFieldLength.LIMITED);
    writer.setInfoStream(infoStream);
    writer.setMergeFactor(2);

    // create the index
    createIndexNoClose(false, "test", writer);
    writer.commit();

    final Directory[] dirs = new Directory[10];
    for (int i=0;i<10;i++) {
      dirs[i] = new MockRAMDirectory(dir1);
    }

    IndexReader r = writer.getReader();

    final int NUM_THREAD = 5;
    final float SECONDS = 3;

    final long endTime = (long) (System.currentTimeMillis() + 1000.*SECONDS);
    final List<Throwable> excs = Collections.synchronizedList(new ArrayList<Throwable>());

    final Thread[] threads = new Thread[NUM_THREAD];
    for(int i=0;i<NUM_THREAD;i++) {
      threads[i] = new Thread() {
          @Override
          public void run() {
            while(System.currentTimeMillis() < endTime) {
              try {
                writer.addIndexesNoOptimize(dirs);
              } catch (Throwable t) {
                excs.add(t);
                throw new RuntimeException(t);
              }
            }
          }
        };
      threads[i].setDaemon(true);
      threads[i].start();
    }

    int lastCount = 0;
    while(System.currentTimeMillis() < endTime) {
      IndexReader r2 = r.reopen();
      if (r2 != r) {
        r.close();
        r = r2;
      }
      Query q = new TermQuery(new Term("indexname", "test"));
      final int count = new IndexSearcher(r).search(q, 10).totalHits;
      assertTrue(count >= lastCount);
      lastCount = count;
    }

    for(int i=0;i<NUM_THREAD;i++) {
      threads[i].join();
    }

    assertEquals(0, excs.size());
    writer.close();

    _TestUtil.checkIndex(dir1);
    r.close();
    dir1.close();
  }

Examples of org.apache.lucene.store.MockRAMDirectory


  // Stress test reopen during add/delete
  public void testDuringAddDelete() throws Exception {
    Directory dir1 = new MockRAMDirectory();
    final IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(),
                                               IndexWriter.MaxFieldLength.LIMITED);
    writer.setInfoStream(infoStream);
    writer.setMergeFactor(2);

    // create the index
    createIndexNoClose(false, "test", writer);
    writer.commit();

    IndexReader r = writer.getReader();

    final int NUM_THREAD = 5;
    final float SECONDS = 3;

    final long endTime = (long) (System.currentTimeMillis() + 1000.*SECONDS);
    final List<Throwable> excs = Collections.synchronizedList(new ArrayList<Throwable>());

    final Thread[] threads = new Thread[NUM_THREAD];
    for(int i=0;i<NUM_THREAD;i++) {
      threads[i] = new Thread() {
          @Override
          public void run() {
            int count = 0;
            final Random r = new Random();
            while(System.currentTimeMillis() < endTime) {
              try {
                for(int i=0;i<10;i++) {
                  writer.addDocument(createDocument(10*count+i, "test", 4));
                }
                count++;
                final int limit = count*10;
                for(int i=0;i<5;i++) {
                  int x = r.nextInt(limit);
                  writer.deleteDocuments(new Term("field3", "b"+x));
                }
              } catch (Throwable t) {
                excs.add(t);
                throw new RuntimeException(t);
              }
            }
          }
        };
      threads[i].setDaemon(true);
      threads[i].start();
    }

    int sum = 0;
    while(System.currentTimeMillis() < endTime) {
      IndexReader r2 = r.reopen();
      if (r2 != r) {
        r.close();
        r = r2;
      }
      Query q = new TermQuery(new Term("indexname", "test"));
      sum += new IndexSearcher(r).search(q, 10).totalHits;
    }

    for(int i=0;i<NUM_THREAD;i++) {
      threads[i].join();
    }
    assertTrue(sum > 0);

    assertEquals(0, excs.size());
    writer.close();

    _TestUtil.checkIndex(dir1);
    r.close();
    dir1.close();
  }

Examples of org.apache.lucene.store.MockRAMDirectory


  public void testExpungeDeletes() throws Throwable {
    Directory dir = new MockRAMDirectory();
    final IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(),
                                               IndexWriter.MaxFieldLength.LIMITED);
    Document doc = new Document();
    doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
    Field id = new Field("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
    doc.add(id);
    id.setValue("0");
    w.addDocument(doc);
    id.setValue("1");
    w.addDocument(doc);
    w.deleteDocuments(new Term("id", "0"));

    IndexReader r = w.getReader();
    w.expungeDeletes();
    w.close();
    r.close();
    r = IndexReader.open(dir, true);
    assertEquals(1, r.numDocs());
    assertFalse(r.hasDeletions());
    r.close();
    dir.close();
  }

Examples of org.apache.lucene.store.MockRAMDirectory


  public void testDeletesNumDocs() throws Throwable {
    Directory dir = new MockRAMDirectory();
    final IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(),
                                               IndexWriter.MaxFieldLength.LIMITED);
    Document doc = new Document();
    doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
    Field id = new Field("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
    doc.add(id);
    id.setValue("0");
    w.addDocument(doc);
    id.setValue("1");
    w.addDocument(doc);
    IndexReader r = w.getReader();
    assertEquals(2, r.numDocs());
    r.close();

    w.deleteDocuments(new Term("id", "0"));
    r = w.getReader();
    assertEquals(1, r.numDocs());
    r.close();

    w.deleteDocuments(new Term("id", "1"));
    r = w.getReader();
    assertEquals(0, r.numDocs());
    r.close();

    w.close();
    dir.close();
  }

Examples of org.apache.lucene.store.MockRAMDirectory


  public void testSegmentWarmer() throws Exception {
    Directory dir = new MockRAMDirectory();
    IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
    w.setMaxBufferedDocs(2);
    w.getReader().close();

    w.setMergedSegmentWarmer(new IndexWriter.IndexReaderWarmer() {
        public void warm(IndexReader r) throws IOException {
          final IndexSearcher s = new IndexSearcher(r);
          final TopDocs hits = s.search(new TermQuery(new Term("foo", "bar")), 10);
          assertEquals(20, hits.totalHits);
        }
      });
   
    Document doc = new Document();
    doc.add(new Field("foo", "bar", Field.Store.YES, Field.Index.NOT_ANALYZED));
    for(int i=0;i<20;i++) {
      w.addDocument(doc);
    }
    w.waitForMerges();
    w.close();
    dir.close();
  }

Examples of org.apache.lucene.store.MockRAMDirectory

    public void testEmptyIndex()
        throws Exception
    {
        // creating two directories for indices
        Directory indexStoreA = new MockRAMDirectory();
        Directory indexStoreB = new MockRAMDirectory();

        // creating a document to store
        Document lDoc = new Document();
        lDoc.add(new Field("fulltext", "Once upon a time.....", Field.Store.YES, Field.Index.ANALYZED));
        lDoc.add(new Field("id", "doc1", Field.Store.YES, Field.Index.NOT_ANALYZED));
        lDoc.add(new Field("handle", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));

        // creating a document to store
        Document lDoc2 = new Document();
        lDoc2.add(new Field("fulltext", "in a galaxy far far away.....",
            Field.Store.YES, Field.Index.ANALYZED));
        lDoc2.add(new Field("id", "doc2", Field.Store.YES, Field.Index.NOT_ANALYZED));
        lDoc2.add(new Field("handle", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));

        // creating a document to store
        Document lDoc3 = new Document();
        lDoc3.add(new Field("fulltext", "a bizarre bug manifested itself....",
            Field.Store.YES, Field.Index.ANALYZED));
        lDoc3.add(new Field("id", "doc3", Field.Store.YES, Field.Index.NOT_ANALYZED));
        lDoc3.add(new Field("handle", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));

        // creating an index writer for the first index
        IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
        // creating an index writer for the second index, but writing nothing
        IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);

        //--------------------------------------------------------------------
        // scenario 1
        //--------------------------------------------------------------------

        // writing the documents to the first index
        writerA.addDocument(lDoc);
        writerA.addDocument(lDoc2);
        writerA.addDocument(lDoc3);
        writerA.optimize();
        writerA.close();

        // closing the second index
        writerB.close();

        // creating the query
        QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, "fulltext", new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT));
        Query query = parser.parse("handle:1");

        // building the searchables
        Searcher[] searchers = new Searcher[2];
        // VITAL STEP: add the searcher for the empty index first, before the searcher for the populated index
        searchers[0] = new IndexSearcher(indexStoreB, true);
        searchers[1] = new IndexSearcher(indexStoreA, true);
        // creating the multiSearcher
        Searcher mSearcher = getMultiSearcherInstance(searchers);
        // performing the search
        ScoreDoc[] hits = mSearcher.search(query, null, 1000).scoreDocs;

        assertEquals(3, hits.length);

        // iterating over the hit documents
        for (int i = 0; i < hits.length; i++) {
          mSearcher.doc(hits[i].doc);
        }
        mSearcher.close();


        //--------------------------------------------------------------------
        // scenario 2
        //--------------------------------------------------------------------

        // adding one document to the empty index
        writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
        writerB.addDocument(lDoc);
        writerB.optimize();
        writerB.close();

        // building the searchables
        Searcher[] searchers2 = new Searcher[2];
        // VITAL STEP: add the searcher for the empty index first, before the searcher for the populated index
        searchers2[0] = new IndexSearcher(indexStoreB, true);
        searchers2[1] = new IndexSearcher(indexStoreA, true);
        // creating the multiSearcher
        MultiSearcher mSearcher2 = getMultiSearcherInstance(searchers2);
        // performing the same search
        ScoreDoc[] hits2 = mSearcher2.search(query, null, 1000).scoreDocs;

        assertEquals(4, hits2.length);

        // iterating over the hit documents
        for (int i = 0; i < hits2.length; i++) {
          // no exception should happen at this point
          mSearcher2.doc(hits2[i].doc);
        }

        // test the subSearcher() method:
        Query subSearcherQuery = parser.parse("id:doc1");
        hits2 = mSearcher2.search(subSearcherQuery, null, 1000).scoreDocs;
        assertEquals(2, hits2.length);
        assertEquals(0, mSearcher2.subSearcher(hits2[0].doc));   // hit from searchers2[0]
        assertEquals(1, mSearcher2.subSearcher(hits2[1].doc));   // hit from searchers2[1]
        subSearcherQuery = parser.parse("id:doc2");
        hits2 = mSearcher2.search(subSearcherQuery, null, 1000).scoreDocs;
        assertEquals(1, hits2.length);
        assertEquals(1, mSearcher2.subSearcher(hits2[0].doc));   // hit from searchers2[1]
        mSearcher2.close();

        //--------------------------------------------------------------------
        // scenario 3
        //--------------------------------------------------------------------

        // deleting the document just added; this will cause a different exception to take place
        Term term = new Term("id", "doc1");
        IndexReader readerB = IndexReader.open(indexStoreB, false);
        readerB.deleteDocuments(term);
        readerB.close();

        // optimizing the index with the writer
        writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
        writerB.optimize();
        writerB.close();

        // building the searchables
        Searcher[] searchers3 = new Searcher[2];

        searchers3[0] = new IndexSearcher(indexStoreB, true);
        searchers3[1] = new IndexSearcher(indexStoreA, true);
        // creating the multiSearcher
        Searcher mSearcher3 = getMultiSearcherInstance(searchers3);
        // performing the same search
        ScoreDoc[] hits3 = mSearcher3.search(query, null, 1000).scoreDocs;

        assertEquals(3, hits3.length);

        // iterating over the hit documents
        for (int i = 0; i < hits3.length; i++) {
          mSearcher3.doc(hits3[i].doc);
        }
        mSearcher3.close();
        indexStoreA.close();
        indexStoreB.close();
    }