Examples of MockDirectoryWrapper
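
org.apache.lucene.store.MockDirectoryWrapper is the Directory wrapper used by Lucene's test framework: it delegates to a real Directory (a RAMDirectory or FSDirectory in the snippets below) and layers test checks on top, such as running CheckIndex when the directory is closed, preventing a file from being written twice, and injecting failures registered through failOn(...). A minimal sketch of that wrapping pattern, using only calls that appear in the snippets below:

    Random random = new Random();
    MockDirectoryWrapper dir = new MockDirectoryWrapper(random, new RAMDirectory());
    dir.setPreventDoubleWrite(false);  // allow the test to overwrite existing files
    dir.setCheckIndexOnClose(false);   // skip the automatic CheckIndex when dir is closed
    // ... exercise an IndexWriter / IndexReader against dir ...
    dir.close();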


Examples of org.apache.lucene.store.MockDirectoryWrapper

    final long t0 = System.currentTimeMillis();

    final LineFileDocs docs = new LineFileDocs(random);
    final File tempDir = _TestUtil.getTempDir("nrtopenfiles");
    final MockDirectoryWrapper _dir = newFSDirectory(tempDir);
    _dir.setCheckIndexOnClose(false); // don't double-checkIndex, we do it ourselves
    Directory dir = _dir;
    final IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(IndexWriterConfig.OpenMode.CREATE);

    if (LuceneTestCase.TEST_NIGHTLY) {
      // newIWConfig makes smallish max seg size, which

Examples of org.apache.lucene.store.MockDirectoryWrapper

            setOpenMode(OpenMode.APPEND).
            setMaxBufferedDocs(10).
            setMergePolicy(newLogMergePolicy(4))
    );

    writer.addIndexes(new Directory[] { aux, new MockDirectoryWrapper(random, new RAMDirectory(aux)) });
    assertEquals(1060, writer.maxDoc());
    assertEquals(1000, writer.getDocCount(0));
    writer.close();

    // make sure the index is correct

Examples of org.apache.lucene.store.MockDirectoryWrapper

            setOpenMode(OpenMode.APPEND).
            setMaxBufferedDocs(4).
            setMergePolicy(newLogMergePolicy(4))
    );

    writer.addIndexes(new Directory[] { aux, new MockDirectoryWrapper(random, new RAMDirectory(aux)) });
    assertEquals(1020, writer.maxDoc());
    assertEquals(1000, writer.getDocCount(0));
    writer.close();
    dir.close();
    aux.close();
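
Both addIndexes excerpts pass the auxiliary index twice to writer.addIndexes: once directly and once as an in-memory copy wrapped in a MockDirectoryWrapper. A sketch of just that copy-and-wrap step (aux is the auxiliary Directory created earlier in the full test, outside this excerpt):

    // Copy the existing aux index into RAM, then wrap the copy so the test
    // framework can check and track it like any other test directory.
    Directory auxCopy = new MockDirectoryWrapper(random, new RAMDirectory(aux));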

Examples of org.apache.lucene.store.MockDirectoryWrapper

    static final int NUM_THREADS = 5;
    final Thread[] threads = new Thread[NUM_THREADS];

    public RunAddIndexesThreads(int numCopy) throws Throwable {
      NUM_COPY = numCopy;
      dir = new MockDirectoryWrapper(random, new RAMDirectory());
      IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
          TEST_VERSION_CURRENT, new MockAnalyzer(random))
          .setMaxBufferedDocs(2));
      for (int i = 0; i < NUM_INIT_DOCS; i++)
        addDoc(writer);
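
The addDoc helper and the NUM_INIT_DOCS constant are outside this excerpt. A hypothetical sketch of what such a helper might look like, using the newField convenience method that appears in a later snippet:

    // Hypothetical sketch of the addDoc helper called above: index one small
    // document so the initial index has some content to copy from.
    void addDoc(IndexWriter writer) throws IOException {
      Document doc = new Document();
      doc.add(newField("content", "aaa", Field.Store.NO, Field.Index.ANALYZED));
      writer.addDocument(doc);
    }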

Examples of org.apache.lucene.store.MockDirectoryWrapper

            public void run() {
              try {

                final Directory[] dirs = new Directory[NUM_COPY];
                for(int k=0;k<NUM_COPY;k++)
                  dirs[k] = new MockDirectoryWrapper(random, new RAMDirectory(dir));

                int j=0;

                while(true) {
                  // System.out.println(Thread.currentThread().getName() + ": iter j=" + j);

Examples of org.apache.lucene.store.MockDirectoryWrapper

      }
    }
  }

  public void testSubclassConcurrentMergeScheduler() throws IOException {
    MockDirectoryWrapper dir = newDirectory();
    dir.failOn(new FailOnlyOnMerge());

    Document doc = new Document();
    Field idField = newField("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
    doc.add(idField);
   
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(
        TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMergeScheduler(new MyMergeScheduler())
        .setMaxBufferedDocs(2).setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)
        .setMergePolicy(newLogMergePolicy()));
    LogMergePolicy logMP = (LogMergePolicy) writer.getConfig().getMergePolicy();
    logMP.setMergeFactor(10);
    for(int i=0;i<20;i++)
      writer.addDocument(doc);

    ((MyMergeScheduler) writer.getConfig().getMergeScheduler()).sync();
    writer.close();
   
    assertTrue(mergeThreadCreated);
    assertTrue(mergeCalled);
    assertTrue(excCalled);
    dir.close();
  }
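
The FailOnlyOnMerge installed via dir.failOn(...) above is not part of this excerpt. A hypothetical sketch of what such a hook might look like, using the test framework's MockDirectoryWrapper.Failure callback; the stack-trace check for the merge scheduler's doMerge method is an assumption:

  // Hypothetical sketch, not the excerpt's actual class: a Failure whose
  // eval() throws only while a merge is on the call stack, so the custom
  // merge scheduler's exception handling (excCalled above) is exercised.
  private static class FailOnlyOnMerge extends MockDirectoryWrapper.Failure {
    @Override
    public void eval(MockDirectoryWrapper dir) throws IOException {
      StackTraceElement[] trace = new Exception().getStackTrace();
      for (int i = 0; i < trace.length; i++) {
        if ("doMerge".equals(trace[i].getMethodName())) {
          throw new IOException("now failing during merge");
        }
      }
    }
  }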

Examples of org.apache.lucene.store.MockDirectoryWrapper

    writer.close();
  }

  public void testTransactions() throws Throwable {
    // we can't use a non-RAM dir on Windows, because this test needs to double-write.
    MockDirectoryWrapper dir1 = new MockDirectoryWrapper(random, new RAMDirectory());
    MockDirectoryWrapper dir2 = new MockDirectoryWrapper(random, new RAMDirectory());
    dir1.setPreventDoubleWrite(false);
    dir2.setPreventDoubleWrite(false);
    dir1.failOn(new RandomFailure());
    dir2.failOn(new RandomFailure());

    initIndex(dir1);
    initIndex(dir2);

    TimedThread[] threads = new TimedThread[3];
    int numThread = 0;

    IndexerThread indexerThread = new IndexerThread(this, dir1, dir2, threads);
    threads[numThread++] = indexerThread;
    indexerThread.start();

    SearcherThread searcherThread1 = new SearcherThread(this, dir1, dir2, threads);
    threads[numThread++] = searcherThread1;
    searcherThread1.start();

    SearcherThread searcherThread2 = new SearcherThread(this, dir1, dir2, threads);
    threads[numThread++] = searcherThread2;
    searcherThread2.start();

    for(int i=0;i<numThread;i++)
      threads[i].join();

    for(int i=0;i<numThread;i++)
      assertTrue(!threads[i].failed);
    dir1.close();
    dir2.close();
  }
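
The RandomFailure installed on dir1 and dir2 above is likewise outside this excerpt. A hypothetical sketch under the same MockDirectoryWrapper.Failure assumption; the enabling flag and the failure rate are illustrative, not the original test's values:

  // Hypothetical sketch, not the excerpt's actual class: randomly fail a
  // fraction of directory operations so both the indexer and searcher
  // threads occasionally hit IOExceptions and must recover.
  private class RandomFailure extends MockDirectoryWrapper.Failure {
    volatile boolean enabled;  // the test would flip this on once the initial indexes exist
    @Override
    public void eval(MockDirectoryWrapper dir) throws IOException {
      if (enabled && random.nextInt(10) <= 3) {
        throw new IOException("now failing randomly but on purpose");
      }
    }
  }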

Examples of org.apache.lucene.store.MockDirectoryWrapper

    final LineFileDocs docs = new LineFileDocs(random);
    final int RUN_TIME_MSEC = atLeast(500);
    final IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setMaxBufferedDocs(-1).setRAMBufferSizeMB(64);
    final File tempDir = _TestUtil.getTempDir("fstlines");
    final MockDirectoryWrapper dir = new MockDirectoryWrapper(random, FSDirectory.open(tempDir));
    final IndexWriter writer = new IndexWriter(dir, conf);
    writer.setInfoStream(VERBOSE ? System.out : null);
    final long stopTime = System.currentTimeMillis() + RUN_TIME_MSEC;
    Document doc;
    int docCount = 0;
    while((doc = docs.nextDoc()) != null && System.currentTimeMillis() < stopTime) {
      writer.addDocument(doc);
      docCount++;
    }
    IndexReader r = IndexReader.open(writer, true);
    writer.close();
    final PositiveIntOutputs outputs = PositiveIntOutputs.getSingleton(random.nextBoolean());
    Builder<Long> builder = new Builder<Long>(FST.INPUT_TYPE.BYTE2, 0, 0, true, outputs);

    boolean storeOrd = false;
    if (VERBOSE) {
      if (storeOrd) {
        System.out.println("FST stores ord");
      } else {
        System.out.println("FST stores docFreq");
      }
    }
    TermEnum termEnum = r.terms(new Term("body", ""));
    if (VERBOSE) {
      System.out.println("TEST: got termEnum=" + termEnum);
    }
    int ord = 0;
    while(true) {
      final Term term = termEnum.term();
      if (term == null || !"body".equals(term.field())) {
        break;
      }

      // No ord in 3.x:
      /*
      if (ord == 0) {
        try {
          termsEnum.ord();
        } catch (UnsupportedOperationException uoe) {
          if (VERBOSE) {
            System.out.println("TEST: codec doesn't support ord; FST stores docFreq");
          }
          storeOrd = false;
        }
      }
      */
      final int output;
      if (storeOrd) {
        output = ord;
      } else {
        output = termEnum.docFreq();
      }
      //System.out.println("ADD: " + term.text() + " ch[0]=" + (term.text().length() == 0 ? -1 : term.text().charAt(0)));
      builder.add(toIntsRef(term.text()), outputs.get(output));
      ord++;
      if (ord % 100000 == 0 && LuceneTestCase.TEST_NIGHTLY) {
        System.out.println(ord + " terms...");
      }
      termEnum.next();
    }
    final FST<Long> fst = builder.finish();
    if (VERBOSE) {
      System.out.println("FST: " + docCount + " docs; " + ord + " terms; " + fst.getNodeCount() + " nodes; " + fst.getArcCount() + " arcs;" + " " + fst.sizeInBytes() + " bytes");
    }

    if (ord > 0) {
      // Now confirm IntsRefFSTEnum and TermEnum act the
      // same:
      final IntsRefFSTEnum<Long> fstEnum = new IntsRefFSTEnum<Long>(fst);
      int num = atLeast(1000);
      for(int iter=0;iter<num;iter++) {
        final String randomTerm = getRandomString();

        if (VERBOSE) {
          System.out.println("TEST: seek " + randomTerm + " ch[0]=" + (randomTerm.length() == 0 ? -1 : randomTerm.charAt(0)));
        }

        termEnum = r.terms(new Term("body", randomTerm));
        final IntsRefFSTEnum.InputOutput<Long> fstSeekResult = fstEnum.seekCeil(toIntsRef(randomTerm));

        if (termEnum.term() == null || !"body".equals(termEnum.term().field())) {
          assertNull("got " + (fstSeekResult == null ? "null" : toString(fstSeekResult.input) + " but expected null"), fstSeekResult);
        } else {
          assertSame(termEnum, fstEnum, storeOrd);
          for(int nextIter=0;nextIter<10;nextIter++) {
            if (VERBOSE) {
              System.out.println("TEST: next");
              //if (storeOrd) {
              //System.out.println("  ord=" + termEnum.ord());
              //}
            }
            termEnum.next();
            if (termEnum.term() != null && "body".equals(termEnum.term().field())) {
              if (VERBOSE) {
                System.out.println("  term=" + termEnum.term());
              }
              assertNotNull(fstEnum.next());
              assertSame(termEnum, fstEnum, storeOrd);
            } else {
              if (VERBOSE) {
                System.out.println("  end!");
              }
              IntsRefFSTEnum.InputOutput<Long> nextResult = fstEnum.next();
              if (nextResult != null) {
                System.out.println("expected null but got: input=" + toString(nextResult.input) + " output=" + outputs.outputToString(nextResult.output));
                fail();
              }
              break;
            }
          }
        }
      }
    }

    r.close();
    dir.close();
  }
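
The toIntsRef helper called when adding terms above is not shown. With FST.INPUT_TYPE.BYTE2 each input label is one UTF-16 code unit, so a hypothetical version can simply copy the term's chars into an IntsRef:

  // Hypothetical sketch of the missing toIntsRef helper: with
  // INPUT_TYPE.BYTE2, every char of the term becomes one int label.
  static IntsRef toIntsRef(String s) {
    final IntsRef ir = new IntsRef();
    ir.ints = new int[s.length()];
    for (int i = 0; i < s.length(); i++) {
      ir.ints[i] = s.charAt(i);
    }
    ir.length = s.length();
    return ir;
  }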

Examples of org.apache.lucene.store.MockDirectoryWrapper

  public void testRandomExceptions() throws Throwable {
    if (VERBOSE) {
      System.out.println("\nTEST: start testRandomExceptions");
    }
    MockDirectoryWrapper dir = newDirectory();

    MockAnalyzer analyzer = new MockAnalyzer(random);
    analyzer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
    MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)
        .setRAMBufferSizeMB(0.1).setMergeScheduler(new ConcurrentMergeScheduler()));
    ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
    //writer.setMaxBufferedDocs(10);
    if (VERBOSE) {
      System.out.println("TEST: initial commit");
    }
    writer.commit();

    if (VERBOSE) {
      writer.setInfoStream(System.out);
    }

    IndexerThread thread = new IndexerThread(0, writer);
    thread.run();
    if (thread.failure != null) {
      thread.failure.printStackTrace(System.out);
      fail("thread " + thread.getName() + ": hit unexpected failure");
    }

    if (VERBOSE) {
      System.out.println("TEST: commit after thread start");
    }
    writer.commit();

    try {
      writer.close();
    } catch (Throwable t) {
      System.out.println("exception during close:");
      t.printStackTrace(System.out);
      writer.rollback();
    }

    // Confirm that when doc hits exception partway through tokenization, it's deleted:
    IndexReader r2 = IndexReader.open(dir, true);
    final int count = r2.docFreq(new Term("content4", "aaa"));
    final int count2 = r2.docFreq(new Term("content4", "ddd"));
    assertEquals(count, count2);
    r2.close();

    dir.close();
  }

Examples of org.apache.lucene.store.MockDirectoryWrapper

    dir.close();
  }

  public void testRandomExceptionsThreads() throws Throwable {
    MockDirectoryWrapper dir = newDirectory();
    MockAnalyzer analyzer = new MockAnalyzer(random);
    analyzer.setEnableChecks(false); // disable workflow checking as we forcefully close() in exceptional cases.
    MockIndexWriter writer = new MockIndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)
        .setRAMBufferSizeMB(0.2).setMergeScheduler(new ConcurrentMergeScheduler()));
    ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
    //writer.setMaxBufferedDocs(10);
    writer.commit();

    if (VERBOSE) {
      writer.setInfoStream(System.out);
    }

    final int NUM_THREADS = 4;

    final IndexerThread[] threads = new IndexerThread[NUM_THREADS];
    for(int i=0;i<NUM_THREADS;i++) {
      threads[i] = new IndexerThread(i, writer);
      threads[i].start();
    }

    for(int i=0;i<NUM_THREADS;i++)
      threads[i].join();

    for(int i=0;i<NUM_THREADS;i++)
      if (threads[i].failure != null)
        fail("thread " + threads[i].getName() + ": hit unexpected failure");

    writer.commit();

    try {
      writer.close();
    } catch (Throwable t) {
      System.out.println("exception during close:");
      t.printStackTrace(System.out);
      writer.rollback();
    }

    // Confirm that when doc hits exception partway through tokenization, it's deleted:
    IndexReader r2 = IndexReader.open(dir, true);
    final int count = r2.docFreq(new Term("content4", "aaa"));
    final int count2 = r2.docFreq(new Term("content4", "ddd"));
    assertEquals(count, count2);
    r2.close();

    dir.close();
  }