Examples of MockRAMDirectory


Examples of org.apache.lucene.store.MockRAMDirectory

      writer.close();
      dir.close();
    }

    public void testUndeleteAll() throws IOException {
      Directory dir = new MockRAMDirectory();
      IndexWriter writer  = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
      addDocumentWithFields(writer);
      addDocumentWithFields(writer);
      writer.close();
      IndexReader reader = IndexReader.open(dir, false);
      reader.deleteDocument(0);
      reader.deleteDocument(1);
      reader.undeleteAll();
      reader.close();
      reader = IndexReader.open(dir, false);
      assertEquals(2, reader.numDocs()); // nothing has really been deleted thanks to undeleteAll()
      reader.close();
      dir.close();
    }
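
These tests call an addDocumentWithFields(writer) helper that the excerpt never shows. A minimal stand-in, using hypothetical field names rather than the ones from the original test, could look like this:

    private void addDocumentWithFields(IndexWriter writer) throws IOException {
      // Assumed helper: one stored keyword field plus one analyzed text field,
      // enough to give the readers above something to delete and undelete.
      Document doc = new Document();
      doc.add(new Field("keyword", "test1", Field.Store.YES, Field.Index.NOT_ANALYZED));
      doc.add(new Field("text", "some test text", Field.Store.YES, Field.Index.ANALYZED));
      writer.addDocument(doc);
    }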

Examples of org.apache.lucene.store.MockRAMDirectory

      reader.close();
      dir.close();
    }

    public void testUndeleteAllAfterClose() throws IOException {
      Directory dir = new MockRAMDirectory();
      IndexWriter writer  = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
      addDocumentWithFields(writer);
      addDocumentWithFields(writer);
      writer.close();
      IndexReader reader = IndexReader.open(dir, false);
      reader.deleteDocument(0);
      reader.deleteDocument(1);
      reader.close();
      reader = IndexReader.open(dir, false);
      reader.undeleteAll();
      assertEquals(2, reader.numDocs()); // nothing has really been deleted thanks to undeleteAll()
      reader.close();
      dir.close();
    }

Examples of org.apache.lucene.store.MockRAMDirectory

      reader.close();
      dir.close();
    }

    public void testUndeleteAllAfterCloseThenReopen() throws IOException {
      Directory dir = new MockRAMDirectory();
      IndexWriter writer  = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
      addDocumentWithFields(writer);
      addDocumentWithFields(writer);
      writer.close();
      IndexReader reader = IndexReader.open(dir, false);
      reader.deleteDocument(0);
      reader.deleteDocument(1);
      reader.close();
      reader = IndexReader.open(dir, false);
      reader.undeleteAll();
      reader.close();
      reader = IndexReader.open(dir, false);
      assertEquals(2, reader.numDocs()); // nothing has really been deleted thanks to undeleteAll()
      reader.close();
      dir.close();
    }

Examples of org.apache.lucene.store.MockRAMDirectory

      Term searchTerm = new Term("content", "aaa");
      int START_COUNT = 157;
      int END_COUNT = 144;
     
      // First build up a starting index:
      RAMDirectory startDir = new MockRAMDirectory();
      IndexWriter writer = new IndexWriter(startDir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
      for(int i=0;i<157;i++) {
        Document d = new Document();
        d.add(new Field("id", Integer.toString(i), Field.Store.YES, Field.Index.NOT_ANALYZED));
        d.add(new Field("content", "aaa " + i, Field.Store.NO, Field.Index.ANALYZED));
        writer.addDocument(d);
      }
      writer.close();

      long diskUsage = startDir.sizeInBytes();
      long diskFree = diskUsage+100;     

      IOException err = null;

      boolean done = false;

      // Iterate w/ ever increasing free disk space:
      while(!done) {
        MockRAMDirectory dir = new MockRAMDirectory(startDir);

        // If IndexReader hits disk full, it can write to
        // the same files again.
        dir.setPreventDoubleWrite(false);

        IndexReader reader = IndexReader.open(dir, false);

        // For each disk size, first try to commit against
        // dir that will hit random IOExceptions & disk
        // full; after, give it infinite disk space & turn
        // off random IOExceptions & retry w/ same reader:
        boolean success = false;

        for(int x=0;x<2;x++) {

          double rate = 0.05;
          double diskRatio = ((double) diskFree)/diskUsage;
          long thisDiskFree;
          String testName;

          if (0 == x) {
            thisDiskFree = diskFree;
            if (diskRatio >= 2.0) {
              rate /= 2;
            }
            if (diskRatio >= 4.0) {
              rate /= 2;
            }
            if (diskRatio >= 6.0) {
              rate = 0.0;
            }
            if (debug) {
              System.out.println("\ncycle: " + diskFree + " bytes");
            }
            testName = "disk full during reader.close() @ " + thisDiskFree + " bytes";
          } else {
            thisDiskFree = 0;
            rate = 0.0;
            if (debug) {
              System.out.println("\ncycle: same writer: unlimited disk space");
            }
            testName = "reader re-use after disk full";
          }

          dir.setMaxSizeInBytes(thisDiskFree);
          dir.setRandomIOExceptionRate(rate, diskFree);

          try {
            if (0 == x) {
              int docId = 12;
              for(int i=0;i<13;i++) {
                reader.deleteDocument(docId);
                reader.setNorm(docId, "contents", (float) 2.0);
                docId += 12;
              }
            }
            reader.close();
            success = true;
            if (0 == x) {
              done = true;
            }
          } catch (IOException e) {
            if (debug) {
              System.out.println("  hit IOException: " + e);
              e.printStackTrace(System.out);
            }
            err = e;
            if (1 == x) {
              e.printStackTrace();
              fail(testName + " hit IOException after disk space was freed up");
            }
          }

          // Whether we succeeded or failed, check that all
          // un-referenced files were in fact deleted (ie,
          // we did not create garbage).  Just create a
          // new IndexFileDeleter, have it delete
          // unreferenced files, then verify that in fact
          // no files were deleted:
          String[] startFiles = dir.listAll();
          SegmentInfos infos = new SegmentInfos();
          infos.read(dir);
          new IndexFileDeleter(dir, new KeepOnlyLastCommitDeletionPolicy(), infos, null, null);
          String[] endFiles = dir.listAll();

          Arrays.sort(startFiles);
          Arrays.sort(endFiles);

          //for(int i=0;i<startFiles.length;i++) {
          //  System.out.println("  startFiles: " + i + ": " + startFiles[i]);
          //}

          if (!Arrays.equals(startFiles, endFiles)) {
            String successStr;
            if (success) {
              successStr = "success";
            } else {
              successStr = "IOException";
              err.printStackTrace();
            }
            fail("reader.close() failed to delete unreferenced files after " + successStr + " (" + diskFree + " bytes): before delete:\n    " + arrayToString(startFiles) + "\n  after delete:\n    " + arrayToString(endFiles));
          }

          // Finally, verify index is not corrupt, and, if
          // we succeeded, we see all docs changed, and if
          // we failed, we see either all docs or no docs
          // changed (transactional semantics):
          IndexReader newReader = null;
          try {
            newReader = IndexReader.open(dir, false);
          } catch (IOException e) {
            e.printStackTrace();
            fail(testName + ":exception when creating IndexReader after disk full during close: " + e);
          }
          /*
          int result = newReader.docFreq(searchTerm);
          if (success) {
            if (result != END_COUNT) {
              fail(testName + ": method did not throw exception but docFreq('aaa') is " + result + " instead of expected " + END_COUNT);
            }
          } else {
            // On hitting exception we still may have added
            // all docs:
            if (result != START_COUNT && result != END_COUNT) {
              err.printStackTrace();
              fail(testName + ": method did throw exception but docFreq('aaa') is " + result + " instead of expected " + START_COUNT + " or " + END_COUNT);
            }
          }
          */

          IndexSearcher searcher = new IndexSearcher(newReader);
          ScoreDoc[] hits = null;
          try {
            hits = searcher.search(new TermQuery(searchTerm), null, 1000).scoreDocs;
          } catch (IOException e) {
            e.printStackTrace();
            fail(testName + ": exception when searching: " + e);
          }
          int result2 = hits.length;
          if (success) {
            if (result2 != END_COUNT) {
              fail(testName + ": method did not throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + END_COUNT);
            }
          } else {
            // On hitting exception we still may have added
            // all docs:
            if (result2 != START_COUNT && result2 != END_COUNT) {
              err.printStackTrace();
              fail(testName + ": method did throw exception but hits.length for search on term 'aaa' is " + result2 + " instead of expected " + START_COUNT);
            }
          }

          searcher.close();
          newReader.close();

          if (result2 == END_COUNT) {
            break;
          }
        }

        dir.close();

        // Try again with 10 more bytes of free space:
        diskFree += 10;
      }
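
Stripped of the test scaffolding, the disk-full loop above leans on three MockRAMDirectory fault-injection hooks, all visible in the snippet; the values below are illustrative:

      // Copy the pre-built index so every iteration starts from the same bytes.
      MockRAMDirectory dir = new MockRAMDirectory(startDir);

      // Allow a file to be rewritten after a simulated disk-full failure
      // (MockRAMDirectory would otherwise flag the double write).
      dir.setPreventDoubleWrite(false);

      // Fail writes once the directory would exceed this many bytes.
      dir.setMaxSizeInBytes(diskFree);

      // Make roughly 5% of I/O calls throw IOException; the second argument
      // seeds the injected randomness (the test reuses diskFree for it).
      dir.setRandomIOExceptionRate(0.05, diskFree);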


Examples of org.apache.lucene.store.MockRAMDirectory

      startDir.close();
    }

    public void testDocsOutOfOrderJIRA140() throws IOException {
      Directory dir = new MockRAMDirectory();     
      IndexWriter writer  = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
      for(int i=0;i<11;i++) {
        addDoc(writer, "aaa");
      }
      writer.close();
      IndexReader reader = IndexReader.open(dir, false);

      // Try to delete an invalid docId, yet, within range
      // of the final bits of the BitVector:

      boolean gotException = false;
      try {
        reader.deleteDocument(11);
      } catch (ArrayIndexOutOfBoundsException e) {
        gotException = true;
      }
      reader.close();

      writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);

      // We must add more docs to get a new segment written
      for(int i=0;i<11;i++) {
        addDoc(writer, "aaa");
      }

      // Without the fix for LUCENE-140 this call will
      // [incorrectly] hit a "docs out of order"
      // IllegalStateException because the above out-of-bounds
      // deleteDocument call corrupted the index:
      writer.optimize();
      writer.close();
      if (!gotException) {
        fail("delete of out-of-bounds doc number failed to hit exception");
      }
      dir.close();
    }
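
addDoc(writer, ...) is another helper the excerpt omits. Since the searches above match new Term("content", "aaa"), a plausible stand-in indexes its argument into a "content" field:

    private void addDoc(IndexWriter writer, String value) throws IOException {
      // Assumed helper: index the value into the "content" field the tests query.
      Document doc = new Document();
      doc.add(new Field("content", value, Field.Store.NO, Field.Index.ANALYZED));
      writer.addDocument(doc);
    }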

Examples of org.apache.lucene.store.MockRAMDirectory

      dir.close();
    }

    public void testExceptionReleaseWriteLockJIRA768() throws IOException {

      Directory dir = new MockRAMDirectory();     
      IndexWriter writer  = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
      addDoc(writer, "aaa");
      writer.close();

      IndexReader reader = IndexReader.open(dir, false);
      try {
        reader.deleteDocument(1);
        fail("did not hit exception when deleting an invalid doc number");
      } catch (ArrayIndexOutOfBoundsException e) {
        // expected
      }
      reader.close();
      if (IndexWriter.isLocked(dir)) {
        fail("write lock is still held after close");
      }

      reader = IndexReader.open(dir, false);
      try {
        reader.setNorm(1, "content", (float) 2.0);
        fail("did not hit exception when calling setNorm on an invalid doc number");
      } catch (ArrayIndexOutOfBoundsException e) {
        // expected
      }
      reader.close();
      if (IndexWriter.isLocked(dir)) {
        fail("write lock is still held after close");
      }
      dir.close();
    }

Examples of org.apache.lucene.store.MockRAMDirectory

      }
    }

    public void testGetIndexCommit() throws IOException {

      RAMDirectory d = new MockRAMDirectory();

      // set up writer
      IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
      writer.setMaxBufferedDocs(2);
      for(int i=0;i<27;i++)
        addDocumentWithFields(writer);
      writer.close();

      SegmentInfos sis = new SegmentInfos();
      sis.read(d);
      IndexReader r = IndexReader.open(d, false);
      IndexCommit c = r.getIndexCommit();

      assertEquals(sis.getCurrentSegmentFileName(), c.getSegmentsFileName());

      assertTrue(c.equals(r.getIndexCommit()));

      // Change the index
      writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
      writer.setMaxBufferedDocs(2);
      for(int i=0;i<7;i++)
        addDocumentWithFields(writer);
      writer.close();

      IndexReader r2 = r.reopen();
      assertFalse(c.equals(r2.getIndexCommit()));
      assertFalse(r2.getIndexCommit().isOptimized());
      r2.close();

      writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
      writer.optimize();
      writer.close();

      r2 = r.reopen();
      assertTrue(r2.getIndexCommit().isOptimized());

      r.close();
      r2.close();
      d.close();
    }     

Examples of org.apache.lucene.store.MockRAMDirectory

      r2.close();
      d.close();
    }     

    public void testReadOnly() throws Throwable {
      RAMDirectory d = new MockRAMDirectory();
      IndexWriter writer = new IndexWriter(d, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
      addDocumentWithFields(writer);
      writer.commit();
      addDocumentWithFields(writer);
      writer.close();

Examples of org.apache.lucene.store.MockRAMDirectory

    IndexReader.open(dir,true).close();
  }

  // LUCENE-1647
  public void testIndexReaderUnDeleteAll() throws Exception {
    MockRAMDirectory dir = new MockRAMDirectory();
    dir.setPreventDoubleWrite(false);
    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT),
                                         IndexWriter.MaxFieldLength.UNLIMITED);
    writer.addDocument(createDocument("a"));
    writer.addDocument(createDocument("b"));
    writer.addDocument(createDocument("c"));
    writer.close();
    IndexReader reader = IndexReader.open(dir, false);
    reader.deleteDocuments(new Term("id", "a"));
    reader.flush();
    reader.deleteDocuments(new Term("id", "b"));
    reader.undeleteAll();
    reader.deleteDocuments(new Term("id", "b"));
    reader.close();
    IndexReader.open(dir,true).close();
    dir.close();
  }
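
createDocument is also not shown in the excerpt. Given that the deletes above match new Term("id", ...), a minimal stand-in stores its argument in an un-analyzed "id" field:

  private Document createDocument(String id) {
    // Assumed helper: keep the id as a single un-tokenized term so
    // Term("id", "a") style deletes can match it exactly.
    Document doc = new Document();
    doc.add(new Field("id", id, Field.Store.YES, Field.Index.NOT_ANALYZED));
    return doc;
  }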

Examples of org.apache.lucene.store.MockRAMDirectory

  }

  // LUCENE-1509
  public void testNoDupCommitFileNames() throws Throwable {

    Directory dir = new MockRAMDirectory();
   
    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(org.apache.lucene.util.Version.LUCENE_CURRENT),
                                         IndexWriter.MaxFieldLength.LIMITED);

    writer.setMaxBufferedDocs(2);
    writer.addDocument(createDocument("a"));
    writer.addDocument(createDocument("a"));
    writer.addDocument(createDocument("a"));
    writer.close();
   
    Collection commits = IndexReader.listCommits(dir);
    Iterator it = commits.iterator();
    while(it.hasNext()) {
      IndexCommit commit = (IndexCommit) it.next();
      Collection files = commit.getFileNames();
      HashSet seen = new HashSet();
      Iterator it2 = files.iterator();
      while(it2.hasNext()) {
        String fileName = (String) it2.next();
        assertTrue("file " + fileName + " was duplicated", !seen.contains(fileName));
        seen.add(fileName);
      }
    }

    dir.close();
  }
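
The raw-type Iterator loops above reflect the pre-generics style of this test code. Assuming the generified signatures later Lucene releases expose (Collection<IndexCommit> from IndexReader.listCommits and Collection<String> from IndexCommit.getFileNames), the same duplicate check can be written as:

    for (IndexCommit commit : IndexReader.listCommits(dir)) {
      Set<String> seen = new HashSet<String>();
      for (String fileName : commit.getFileNames()) {
        // Set.add returns false when the element is already present,
        // so a duplicated file name fails the assertion.
        assertTrue("file " + fileName + " was duplicated", seen.add(fileName));
      }
    }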