Package org.apache.lucene.store

Examples of org.apache.lucene.store.MockRAMDirectory
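
MockRAMDirectory is the in-memory Directory that Lucene's own test suite uses in place of RAMDirectory: it adds test-time checks such as detecting files left open, preventing double writes, and simulating a crash (see the TestCrash snippets below). It lives in Lucene's test sources, so it is normally only available on the test classpath. The following stand-alone sketch shows the basic write/read cycle the snippets share; it is illustrative only (the class name MockRAMDirectoryExample is made up here), written against the same Lucene 2.9/3.0-era API the snippets use:

import java.io.IOException;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.util.Version;

// Hypothetical example class, not part of Lucene itself.
public class MockRAMDirectoryExample {

  public static void main(String[] args) throws IOException {
    // Behaves like RAMDirectory, but tracks open files and simulated failures.
    MockRAMDirectory dir = new MockRAMDirectory();

    IndexWriter writer = new IndexWriter(dir,
        new StandardAnalyzer(Version.LUCENE_CURRENT),
        IndexWriter.MaxFieldLength.LIMITED);
    Document doc = new Document();
    doc.add(new Field("id", "1", Field.Store.YES, Field.Index.NOT_ANALYZED));
    writer.addDocument(doc);
    writer.close();

    // Second argument: open the reader read-only.
    IndexReader reader = IndexReader.open(dir, true);
    System.out.println("numDocs = " + reader.numDocs());
    reader.close();

    // close() also lets MockRAMDirectory check for files left open.
    dir.close();
  }
}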


  public void testThreadSafety() throws Exception {
    final Directory dir = new MockRAMDirectory();
    final int n = 150;

    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
    for (int i = 0; i < n; i++) {
      writer.addDocument(createDocument(i, 3));
    }
    writer.optimize();
    writer.close();

    final TestReopen test = new TestReopen() {     
      @Override
      protected void modifyIndex(int i) throws IOException {
        if (i % 3 == 0) {
          IndexReader modifier = IndexReader.open(dir, false);
          modifier.setNorm(i, "field1", 50);
          modifier.close();
        } else if (i % 3 == 1) {
          IndexReader modifier = IndexReader.open(dir, false);
          modifier.deleteDocument(i % modifier.maxDoc());
          modifier.close();
        } else {
          IndexWriter modifier = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
          modifier.addDocument(createDocument(n + i, 6));
          modifier.close();
        }
      }

      @Override
      protected IndexReader openReader() throws IOException {
        return IndexReader.open(dir, false);
      }     
    };
   
    final List readers = Collections.synchronizedList(new ArrayList());
    IndexReader firstReader = IndexReader.open(dir, false);
    IndexReader reader = firstReader;
    final Random rnd = newRandom();
   
    ReaderThread[] threads = new ReaderThread[n];
    final Set readersToClose = Collections.synchronizedSet(new HashSet());
   
    for (int i = 0; i < n; i++) {
      if (i % 10 == 0) {
        IndexReader refreshed = reader.reopen();
        if (refreshed != reader) {
          readersToClose.add(reader);
        }
        reader = refreshed;
      }
      final IndexReader r = reader;
     
      final int index = i;   
     
      ReaderThreadTask task;
     
      if (i < 20 || (i >= 50 && i < 70) || i > 90) {
        task = new ReaderThreadTask() {
         
          @Override
          public void run() throws Exception {
            while (!stopped) {
              if (index % 2 == 0) {
                // refresh reader synchronized
                ReaderCouple c = (refreshReader(r, test, index, true));
                readersToClose.add(c.newReader);
                readersToClose.add(c.refreshedReader);
                readers.add(c);
                // prevent too many readers
                break;
              } else {
                // not synchronized
                IndexReader refreshed = r.reopen();
               
               
                IndexSearcher searcher = new IndexSearcher(refreshed);
                ScoreDoc[] hits = searcher.search(
                    new TermQuery(new Term("field1", "a" + rnd.nextInt(refreshed.maxDoc()))),
                    null, 1000).scoreDocs;
                if (hits.length > 0) {
                  searcher.doc(hits[0].doc);
                }
               
                // r might have changed because this is not a
                // synchronized method. However we don't want
                // to make it synchronized to test
                // thread-safety of IndexReader.close().
                // That's why we add refreshed also to
                // readersToClose, because double closing is fine
                if (refreshed != r) {
                  refreshed.close();
                }
                readersToClose.add(refreshed);
              }
              synchronized(this) {
                wait(1000);
              }
            }
          }
         
        };
      } else {
        task = new ReaderThreadTask() {
          @Override
          public void run() throws Exception {
            while (!stopped) {
              int numReaders = readers.size();
              if (numReaders > 0) {
                ReaderCouple c = (ReaderCouple) readers.get(rnd.nextInt(numReaders));
                TestIndexReader.assertIndexEquals(c.newReader, c.refreshedReader);
              }
             
              synchronized(this) {
                wait(100);
              }
            }
                       
          }
         
        };
      }
     
      threads[i] = new ReaderThread(task);
      threads[i].start();
    }
   
    synchronized(this) {
      wait(15000);
    }
   
    for (int i = 0; i < n; i++) {
      if (threads[i] != null) {
        threads[i].stopThread();
      }
    }
   
    for (int i = 0; i < n; i++) {
      if (threads[i] != null) {
        threads[i].join();
        if (threads[i].error != null) {
          String msg = "Error occurred in thread " + threads[i].getName() + ":\n" + threads[i].error.getMessage();
          fail(msg);
        }
      }
     
    }
   
    Iterator it = readersToClose.iterator();
    while (it.hasNext()) {
      ((IndexReader) it.next()).close();
    }
   
    firstReader.close();
    reader.close();
   
    it = readersToClose.iterator();
    while (it.hasNext()) {
      assertReaderClosed((IndexReader) it.next(), true, true);
    }

    assertReaderClosed(reader, true, true);
    assertReaderClosed(firstReader, true, true);

    dir.close();
  }


    // setUp(): resolve java.io.tmpdir for the tests that build an on-disk index
    String tempDir = System.getProperty("java.io.tmpdir");
    if (tempDir == null)
      throw new IOException("java.io.tmpdir undefined, cannot run test");
    indexDir = new File(tempDir, "IndexReaderReopen");
  }
 
  public void testCloseOrig() throws Throwable {
    Directory dir = new MockRAMDirectory();
    createIndex(dir, false);
    IndexReader r1 = IndexReader.open(dir, false);
    IndexReader r2 = IndexReader.open(dir, false);
    r2.deleteDocument(0);
    r2.close();

    IndexReader r3 = r1.reopen();
    assertTrue(r1 != r3);
    r1.close();
    try {
      r1.document(2);
      fail("did not hit exception");
    } catch (AlreadyClosedException ace) {
      // expected
    }
    r3.close();
    dir.close();
  }

  public void testDeletes() throws Throwable {
    Directory dir = new MockRAMDirectory();
    createIndex(dir, false); // Create an index with a bunch of docs (1 segment)

    modifyIndex(0, dir); // Get delete bitVector on 1st segment
    modifyIndex(5, dir); // Add a doc (2 segments)

    IndexReader r1 = IndexReader.open(dir, false); // MSR

    modifyIndex(5, dir); // Add another doc (3 segments)

    IndexReader r2 = r1.reopen(); // MSR
    assertTrue(r1 != r2);

    SegmentReader sr1 = (SegmentReader) r1.getSequentialSubReaders()[0]; // Get SRs for the first segment from original
    SegmentReader sr2 = (SegmentReader) r2.getSequentialSubReaders()[0]; // and reopened IRs

    // At this point they share the same BitVector
    assertTrue(sr1.deletedDocs==sr2.deletedDocs);

    r2.deleteDocument(0);

    // r1 should not see the delete
    assertFalse(r1.isDeleted(0));

    // Now r2 should have made a private copy of deleted docs:
    assertTrue(sr1.deletedDocs!=sr2.deletedDocs);

    r1.close();
    r2.close();
    dir.close();
  }

  public void testDeletes2() throws Throwable {
    Directory dir = new MockRAMDirectory();
    createIndex(dir, false);
    // Get delete bitVector
    modifyIndex(0, dir);
    IndexReader r1 = IndexReader.open(dir, false);

    // Add doc:
    modifyIndex(5, dir);

    IndexReader r2 = r1.reopen();
    assertTrue(r1 != r2);

    IndexReader[] rs2 = r2.getSequentialSubReaders();

    SegmentReader sr1 = SegmentReader.getOnlySegmentReader(r1);
    SegmentReader sr2 = (SegmentReader) rs2[0];

    // At this point they share the same BitVector
    assertTrue(sr1.deletedDocs==sr2.deletedDocs);
    final BitVector delDocs = sr1.deletedDocs;
    r1.close();

    r2.deleteDocument(0);
    assertTrue(delDocs==sr2.deletedDocs);
    r2.close();
    dir.close();
  }

  static class KeepAllCommits implements IndexDeletionPolicy {
    public void onInit(List commits) {
    }
    public void onCommit(List commits) {
    }
  }

  public void testReopenOnCommit() throws Throwable {
    Directory dir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), new KeepAllCommits(), IndexWriter.MaxFieldLength.UNLIMITED);
    for(int i=0;i<4;i++) {
      Document doc = new Document();
      doc.add(new Field("id", ""+i, Field.Store.NO, Field.Index.NOT_ANALYZED));
      writer.addDocument(doc);
      Map data = new HashMap();
      data.put("index", i+"");
      writer.commit(data);
    }
    for(int i=0;i<4;i++) {
      writer.deleteDocuments(new Term("id", ""+i));
      Map data = new HashMap();
      data.put("index", (4+i)+"");
      writer.commit(data);
    }
    writer.close();

    IndexReader r = IndexReader.open(dir, false);
    assertEquals(0, r.numDocs());
    assertEquals(4, r.maxDoc());

    Iterator it = IndexReader.listCommits(dir).iterator();
    while(it.hasNext()) {
      IndexCommit commit = (IndexCommit) it.next();
      IndexReader r2 = r.reopen(commit);
      assertTrue(r2 != r);

      // Reader should be readOnly
      try {
        r2.deleteDocument(0);
        fail("no exception hit");
      } catch (UnsupportedOperationException uoe) {
        // expected
      }

      final Map s = commit.getUserData();
      final int v;
      if (s.size() == 0) {
        // First commit created by IW
        v = -1;
      } else {
        v = Integer.parseInt((String) s.get("index"));
      }
      if (v < 4) {
        assertEquals(1+v, r2.numDocs());
      } else {
        assertEquals(7-v, r2.numDocs());
      }
      r.close();
      r = r2;
    }
    r.close();
    dir.close();
  }

  public void testLucene() throws IOException
  {

    int num=100;

    Directory indexA = new MockRAMDirectory();
    Directory indexB = new MockRAMDirectory();

    fillIndex(indexA, 0, num);
    boolean fail = verifyIndex(indexA, 0);
    if (fail)
    {
      fail("Index a is invalid");
    }

    fillIndex(indexB, num, num);
    fail = verifyIndex(indexB, num);
    if (fail)
    {
      fail("Index b is invalid");
    }

    Directory merged = new MockRAMDirectory();

    IndexWriter writer = new IndexWriter(merged, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
    writer.setMergeFactor(2);

    writer.addIndexesNoOptimize(new Directory[]{indexA, indexB});
    writer.optimize();
    writer.close();

    fail = verifyIndex(merged, 0);
    merged.close();

    assertFalse("The merged index is invalid", fail);
  }

import org.apache.lucene.document.Field;

public class TestCrash extends LuceneTestCase {

  private IndexWriter initIndex() throws IOException {
    return initIndex(new MockRAMDirectory());
  }

    return writer;
  }

  private void crash(final IndexWriter writer) throws IOException {
    final MockRAMDirectory dir = (MockRAMDirectory) writer.getDirectory();
    ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler) writer.getMergeScheduler();
    dir.crash();
    cms.sync();
    dir.clearCrash();
  }

  public void testCrashWhileIndexing() throws IOException {
    IndexWriter writer = initIndex();
    MockRAMDirectory dir = (MockRAMDirectory) writer.getDirectory();
    crash(writer);
    IndexReader reader = IndexReader.open(dir, false);
    assertTrue(reader.numDocs() < 157);
  }

  public void testWriterAfterCrash() throws IOException {
    IndexWriter writer = initIndex();
    MockRAMDirectory dir = (MockRAMDirectory) writer.getDirectory();
    dir.setPreventDoubleWrite(false);
    crash(writer);
    writer = initIndex(dir);
    writer.close();

    IndexReader reader = IndexReader.open(dir, false);