Examples of Hits


Examples of org.apache.lucene.search.Hits

        } // (an earlier loop, not shown, added the initial 14 documents)
        writer.close();

        Term searchTerm = new Term("content", "aaa");       
        IndexSearcher searcher = new IndexSearcher(dir);
        Hits hits = searcher.search(new TermQuery(searchTerm));
        assertEquals("first number of hits", 14, hits.length());
        searcher.close();

        IndexReader reader = IndexReader.open(dir);

        writer = new IndexWriter(dir, false, new WhitespaceAnalyzer());
        for(int i=0;i<3;i++) {
          for(int j=0;j<11;j++) {
            addDoc(writer);
          }
          searcher = new IndexSearcher(dir);
          hits = searcher.search(new TermQuery(searchTerm));
          assertEquals("reader incorrectly sees changes from writer with autoCommit disabled", 14, hits.length());
          searcher.close();
          assertTrue("reader should have still been current", reader.isCurrent());
        }

        // Now, close the writer:
        writer.close();
        assertFalse("reader should not be current now", reader.isCurrent());

        searcher = new IndexSearcher(dir);
        hits = searcher.search(new TermQuery(searchTerm));
        assertEquals("reader did not see changes after writer was closed", 47, hits.length());
        searcher.close();
    }
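
The fragments above come from Lucene's own test code and rely on helpers such as addDoc() that are not shown. As a point of reference, a minimal self-contained sketch of the same deprecated Hits pattern might look as follows (this assumes Lucene 2.x, where IndexSearcher.search(Query) still returns Hits; the RAMDirectory, the "content" field, the query term and the class name HitsExample are illustrative choices, not part of the snippet above):

    import java.io.IOException;

    import org.apache.lucene.analysis.WhitespaceAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.Hits;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.TermQuery;
    import org.apache.lucene.store.RAMDirectory;

    public class HitsExample {
        public static void main(String[] args) throws IOException {
            RAMDirectory dir = new RAMDirectory();

            // index a single document containing the term the query will look for
            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
            Document doc = new Document();
            doc.add(new Field("content", "aaa", Field.Store.YES, Field.Index.TOKENIZED));
            writer.addDocument(doc);
            writer.close();

            // search and walk the (deprecated) Hits object
            IndexSearcher searcher = new IndexSearcher(dir);
            Hits hits = searcher.search(new TermQuery(new Term("content", "aaa")));
            for (int i = 0; i < hits.length(); i++) {
                Document d = hits.doc(i);      // documents are loaded lazily from the index
                float score = hits.score(i);   // relevance score of the i-th hit
                System.out.println(d.get("content") + " (score=" + score + ")");
            }
            searcher.close();
        }
    }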

Examples of org.apache.lucene.search.Hits

      } // (an earlier loop, not shown, added the initial 14 documents)
      writer.close();

      Term searchTerm = new Term("content", "aaa");       
      IndexSearcher searcher = new IndexSearcher(dir);
      Hits hits = searcher.search(new TermQuery(searchTerm));
      assertEquals("first number of hits", 14, hits.length());
      searcher.close();

      writer = new IndexWriter(dir, false, new WhitespaceAnalyzer(), false);
      writer.setMaxBufferedDocs(10);
      for(int j=0;j<17;j++) {
        addDoc(writer);
      }
      // Delete all docs:
      writer.deleteDocuments(searchTerm);

      searcher = new IndexSearcher(dir);
      hits = searcher.search(new TermQuery(searchTerm));
      assertEquals("reader incorrectly sees changes from writer with autoCommit disabled", 14, hits.length());
      searcher.close();

      // Now, abort the writer, discarding the buffered (uncommitted) changes:
      writer.abort();

      assertNoUnreferencedFiles(dir, "unreferenced files remain after abort()");

      searcher = new IndexSearcher(dir);
      hits = searcher.search(new TermQuery(searchTerm));
      assertEquals("saw changes after writer.abort", 14, hits.length());
      searcher.close();
         
      // Now make sure we can re-open the index, add docs,
      // and all is good:
      writer = new IndexWriter(dir, false, new WhitespaceAnalyzer(), false);
      writer.setMaxBufferedDocs(10);
      for(int i=0;i<12;i++) {
        for(int j=0;j<17;j++) {
          addDoc(writer);
        }
        searcher = new IndexSearcher(dir);
        hits = searcher.search(new TermQuery(searchTerm));
        assertEquals("reader incorrectly sees changes from writer with autoCommit disabled", 14, hits.length());
        searcher.close();
      }

      writer.close();
      searcher = new IndexSearcher(dir);
      hits = searcher.search(new TermQuery(searchTerm));
      assertEquals("didn't see changes after close", 218, hits.length());
      searcher.close();

      dir.close();
    }

Examples of org.apache.lucene.search.Hits

        }
      } // (the nested loops above added 300 documents in total)
      writer.close();

      IndexSearcher searcher = new IndexSearcher(dir);
      Hits hits = searcher.search(new TermQuery(new Term("field", "aaa")));
      assertEquals(300, hits.length());
      searcher.close();

      dir.close();
    }

Examples of org.apache.lucene.search.Hits

      writer.close();

      Term searchTerm = new Term("field", "aaa");

      IndexSearcher searcher = new IndexSearcher(dir);
      Hits hits = searcher.search(new TermQuery(searchTerm));
      assertEquals(10, hits.length());
      searcher.close();

      writer  = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
      writer.setMaxBufferedDocs(10);
      // Enable norms for only 1 doc, post flush
      for(int j=0;j<27;j++) {
        Document doc = new Document();
        Field f = new Field("field", "aaa", Field.Store.YES, Field.Index.TOKENIZED);
        if (j != 26) {
          f.setOmitNorms(true);
        }
        doc.add(f);
        writer.addDocument(doc);
      }
      writer.close();
      searcher = new IndexSearcher(dir);
      hits = searcher.search(new TermQuery(searchTerm));
      assertEquals(27, hits.length());
      searcher.close();

      IndexReader reader = IndexReader.open(dir);
      reader.close();

Examples of org.apache.lucene.search.Hits

        addDoc(writer); // the enclosing loop adds 100 documents in total
      }
      writer.close();
      Term searchTerm = new Term("content", "aaa");       
      IndexSearcher searcher = new IndexSearcher(dir);
      Hits hits = searcher.search(new TermQuery(searchTerm));
      assertEquals("did not get right number of hits", 100, hits.length());
      searcher.close();

      writer  = new IndexWriter(dir, new WhitespaceAnalyzer(), true);
      writer.close();

Examples of org.apache.lucene.search.Hits

     * @throws  ProcessingException  iff an error occurs
     */
    private LuceneCocoonPager buildHits() throws ProcessingException {

        if (queryString != null && queryString.length() != 0) {
            Hits hits = null;

            // TODO (VG): Move parts into compose/initialize/recycle
            try {
                lcs = (LuceneCocoonSearcher) this.manager.lookup(LuceneCocoonSearcher.ROLE);
                Analyzer analyzer = LuceneCocoonHelper.getAnalyzer("org.apache.lucene.analysis.standard.StandardAnalyzer");
                lcs.setAnalyzer(analyzer);
                // get the directory where the index resides
                Directory directory = LuceneCocoonHelper.getDirectory(index, false);
                lcs.setDirectory(directory);
                hits = lcs.search(queryString, LuceneXMLIndexer.BODY_FIELD);
            } catch (IOException ioe) {
                throw new ProcessingException("IOException in search", ioe);
            } catch (ComponentException ce) {
                throw new ProcessingException("ComponentException in search", ce);
            } finally {
                if (lcs != null) {
                    this.manager.release(lcs);
                    lcs = null;
                }
            }

            // wrap the hits in a pager helper object so that only a range of hits is accessed
            LuceneCocoonPager pager = new LuceneCocoonPager(hits);

            int start_index = START_INDEX_DEFAULT;
            if (this.startIndex != null) {
                start_index = this.startIndex.intValue();
                if (start_index <= 0) {
                    start_index = 0;
                }
                pager.setStartIndex(start_index);
            }

            int page_length = PAGE_LENGTH_DEFAULT;
            if (this.pageLength != null) {
                page_length = this.pageLength.intValue();
                if (page_length <= 0) {
                    page_length = hits.length();
                }
                pager.setCountOfHitsPerPage(page_length);
            }

            return pager;
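
LuceneCocoonPager itself is Cocoon-specific, but the windowing it performs can be sketched directly against the Hits object. The helper below is only an illustration of that idea: the method name printPage and the "title" field are made up for this sketch, and hits is assumed to be a non-null result of IndexSearcher.search.

    // Rough sketch of the paging idea: only hits in [startIndex, startIndex + pageLength) are touched.
    static void printPage(org.apache.lucene.search.Hits hits, int startIndex, int pageLength)
            throws java.io.IOException {
        int end = Math.min(hits.length(), startIndex + pageLength);
        for (int i = startIndex; i < end; i++) {
            org.apache.lucene.document.Document d = hits.doc(i);  // loads only the hits of this page
            // "title" is an illustrative stored field name
            System.out.println(i + ": score=" + hits.score(i) + " " + d.get("title"));
        }
    }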

Examples of org.apache.lucene.search.Hits

                                            (CachingMultiReader) parentReader};
            reader = new CombinedIndexReader(readers);
        }

        IndexSearcher searcher = new IndexSearcher(reader);
        Hits hits;
        if (sortFields.length > 0) {
            hits = searcher.search(query, new Sort(sortFields));
        } else {
            hits = searcher.search(query);
        }
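
For readers unfamiliar with the sorted variant used above, here is a hedged sketch of how such a sortFields array can be built with the Lucene 2.x API (SortField and Sort live in org.apache.lucene.search); the "title" field is illustrative, and searcher and query are assumed to be the same objects as in the snippet:

    SortField[] sortFields = new SortField[] {
        new SortField("title", SortField.STRING),   // primary key: a field indexed as a single token
        SortField.FIELD_SCORE                       // tie-breaker: relevance score
    };
    Hits hits = searcher.search(query, new Sort(sortFields));   // sorted search, still returning Hits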

Examples of org.apache.nutch.searcher.Hits

      NutchBean.LOG.info("query: " + query);
      NutchBean.LOG.info("lang: " + lang);
    }

    // search and return hits
    Hits hits;
    try {
      hits = bean.search(queryObj, start + rows, numDupes, dedup, sort, reverse);
    }
    catch (IOException e) {
      if (NutchBean.LOG.isWarnEnabled()) {
        NutchBean.LOG.warn("Search Error", e);
      }
      hits = new Hits(0, new Hit[0]);
    }

    // get the total number of hits, the hits to show, and the hit details
    long totalHits = hits.getTotal();
    int end = (int)Math.min(hits.getLength(), start + rows);
    int numHits = (end > start) ? (end - start) : 0;
    Hit[] show = hits.getHits(start, numHits);
    HitDetails[] details = bean.getDetails(show);

    // setup the SearchResults object, used in response writing
    SearchResults results = new SearchResults();
    results.setResponseType(respType);

Examples of org.apache.nutch.searcher.Hits

      numTotal.incrementAndGet();

      try {
        Query runner = Query.parse(query, conf);
        long start = System.currentTimeMillis();
        Hits hits = bean.search(runner, 10);
        long end = System.currentTimeMillis();
        numResolved.incrementAndGet();
        long total = (end - start);
        if (showTimes) {
          System.out.println("Query for " + query + " numhits "
            + hits.getTotal() + " in " + total + "ms");
        }
        totalTime.addAndGet(total);
      }
      catch (Exception uhe) {
        LOG.info("Error executing search for " + query);
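
Taken together, the two Nutch fragments above follow a common call chain. A minimal, hedged sketch of it against the Nutch 0.9/1.0 searcher API is shown below; the query string, the result window of 10 and the class name NutchSearchSketch are illustrative, not part of the snippets:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.nutch.searcher.Hit;
    import org.apache.nutch.searcher.HitDetails;
    import org.apache.nutch.searcher.Hits;
    import org.apache.nutch.searcher.NutchBean;
    import org.apache.nutch.searcher.Query;
    import org.apache.nutch.util.NutchConfiguration;

    public class NutchSearchSketch {
        public static void main(String[] args) throws IOException {
            Configuration conf = NutchConfiguration.create();
            NutchBean bean = new NutchBean(conf);                 // uses the crawl/index directories configured for the bean

            Query queryObj = Query.parse("apache lucene", conf);  // illustrative query string
            Hits hits = bean.search(queryObj, 10);                // ask for up to 10 hits

            int end = Math.min(hits.getLength(), 10);
            Hit[] show = hits.getHits(0, end);                    // the window of hits to display
            HitDetails[] details = bean.getDetails(show);         // stored fields (url, title, ...) per hit

            System.out.println("total hits: " + hits.getTotal() + ", showing " + details.length);
        }
    }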

Examples of org.exoplatform.services.jcr.impl.core.query.lucene.hits.Hits

            // filter out documents that do not match the name test
            if (nameTest != null)
            {
               Query nameQuery = new NameQuery(nameTest, version, nsMappings);
               Hits nameHits = new ScorerHits(nameQuery.weight(searcher).scorer(reader, true, false));
               for (int i = hits.nextSetBit(0); i >= 0; i = hits.nextSetBit(i + 1))
               {
                  int doc = nameHits.skipTo(i);
                  if (doc == -1)
                  {
                     // no more name tests, clear remaining
                     hits.clear(i, hits.length());
                  }