Package org.apache.lucene.analysis

Examples of org.apache.lucene.analysis.WhitespaceAnalyzer
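WhitespaceAnalyzer tokenizes text on whitespace only: it does not lowercase, strip punctuation, or remove stop words, which is why tests use it when they need predictable tokens. As a minimal sketch of the analyzer on its own, assuming the same pre-3.1 Lucene API used by the excerpts below (no-argument constructor, TermAttribute), the following prints each token of a sample string; the class name WhitespaceAnalyzerDemo is only for illustration:

import java.io.StringReader;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;

public class WhitespaceAnalyzerDemo {
  public static void main(String[] args) throws Exception {
    WhitespaceAnalyzer analyzer = new WhitespaceAnalyzer();
    // Whitespace-only tokenization: punctuation stays attached, case is preserved.
    TokenStream stream = analyzer.tokenStream("field", new StringReader("Hello, world! w1 xx w2"));
    TermAttribute term = stream.addAttribute(TermAttribute.class);
    while (stream.incrementToken()) {
      System.out.println(term.term());   // Hello,  world!  w1  xx  w2
    }
    stream.close();
  }
}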


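  // setUp(): index the docFields strings into a RAMDirectory with WhitespaceAnalyzer,
  // then build a larger index in dir2 by repeatedly adding the index to itself until it
  // holds at least 3000 documents, and finally append extra "field2" documents.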
  @Override
  public void setUp() throws Exception {
    super.setUp();
    RAMDirectory directory = new RAMDirectory();
    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    for (int i = 0; i < docFields.length; i++) {
      Document doc = new Document();
      doc.add(new Field(field, docFields[i], Field.Store.NO, Field.Index.ANALYZED));
      writer.addDocument(doc);
    }
    writer.close();
    searcher = new IndexSearcher(directory, true);

    // Make big index
    dir2 = new MockRAMDirectory(directory);

    // First multiply small test index:
    mulFactor = 1;
    int docCount = 0;
    do {
      final Directory copy = new RAMDirectory(dir2);
      IndexWriter w = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
      w.addIndexesNoOptimize(new Directory[] {copy});
      docCount = w.maxDoc();
      w.close();
      mulFactor *= 2;
    } while(docCount < 3000);

    IndexWriter w = new IndexWriter(dir2, new WhitespaceAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
    Document doc = new Document();
    doc.add(new Field("field2", "xxx", Field.Store.NO, Field.Index.ANALYZED));
    for (int i = 0; i < NUM_EXTRA_DOCS / 2; i++) {
      w.addDocument(doc);
    }


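  // Tail of a String[] of test documents, followed by makeQuery(), which parses the
  // query text with a QueryParser backed by WhitespaceAnalyzer.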
    "w1 xx w2 yy w3",
    "w1 w3 xx w2 yy w3"
  };

  public Query makeQuery(String queryText) throws ParseException {
    Query q = (new QueryParser(Version.LUCENE_CURRENT, field, new WhitespaceAnalyzer())).parse(queryText);
    return q;
  }

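  // setUp(): open an IndexWriter on a fresh RAMDirectory with WhitespaceAnalyzer,
  // configure the plotter, and add the test data.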
  @Override
  protected void setUp() throws IOException {
    directory = new RAMDirectory();

    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
   
    setUpPlotter( 2, 15);
   
    addData(writer);
   

*/
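// TestUnoptimizedReaderOnConstructor: write three batches of documents in separate
// IndexWriter sessions, then open an unoptimized (multi-segment) reader on the result.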
public class TestUnoptimizedReaderOnConstructor extends TestCase {

  public void test() throws Exception {
    Directory dir = new RAMDirectory();
    IndexWriter iw = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
    addDocument(iw, "Hello, world!");
    addDocument(iw, "All work and no play makes jack a dull boy");
    iw.close();

    iw = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
    addDocument(iw, "Hello, tellus!");
    addDocument(iw, "All work and no play makes danny a dull boy");
    iw.close();

    iw = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
    addDocument(iw, "Hello, earth!");
    addDocument(iw, "All work and no play makes wendy a dull girl");
    iw.close();

    IndexReader unoptimizedReader = IndexReader.open(dir, false);

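    // Index the values "1".."4" as stored, NOT_ANALYZED fields; the WhitespaceAnalyzer
    // is only consulted for analyzed fields.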
    RAMDirectory directory = new RAMDirectory();

    String[] values = new String[] { "1", "2", "3", "4" };

    try {
      IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
      for (int i = 0; i < values.length; i++) {
        Document doc = new Document();
        doc.add(new Field(FIELD, values[i], Field.Store.YES, Field.Index.NOT_ANALYZED));
        writer.addDocument(doc);
      }

  }

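  // testRandomExceptions(): a MockIndexWriter on a MockRAMDirectory with a tiny RAM
  // buffer and merge-scheduler exceptions suppressed.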
  public void testRandomExceptions() throws Throwable {
    MockRAMDirectory dir = new MockRAMDirectory();

    MockIndexWriter writer = new MockIndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    ((ConcurrentMergeScheduler) writer.getMergeScheduler()).setSuppressExceptions();
    //writer.setMaxBufferedDocs(10);
    writer.setRAMBufferSizeMB(0.1);

    if (DEBUG)

  }

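  // testRandomExceptionsThreads(): the multi-threaded variant of the test above, with a
  // slightly larger RAM buffer.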
  public void testRandomExceptionsThreads() throws Throwable {

    MockRAMDirectory dir = new MockRAMDirectory();
    MockIndexWriter writer = new MockIndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
    ((ConcurrentMergeScheduler) writer.getMergeScheduler()).setSuppressExceptions();
    //writer.setMaxBufferedDocs(10);
    writer.setRAMBufferSizeMB(0.2);

    if (DEBUG)

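  // testIndexing(): open an IndexWriter without compound files and call getReader()
  // so the writer starts pooling near-real-time readers.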
  Random random = new Random();
  HeavyAtomicInt seq = new HeavyAtomicInt(1);

  public void testIndexing() throws Exception {
    Directory mainDir = new MockRAMDirectory();
    IndexWriter writer = new IndexWriter(mainDir, new WhitespaceAnalyzer(),
        IndexWriter.MaxFieldLength.LIMITED);
    writer.setUseCompoundFile(false);
    IndexReader reader = writer.getReader(); // start pooling readers
    reader.close();
    writer.setMergeFactor(2);

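        // Add 100 documents containing the term "aaa" (in the "content" field) with a
        // WhitespaceAnalyzer-backed writer.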
        IndexWriter writer = null;
        IndexReader reader = null;
        Term searchTerm = new Term("content", "aaa");

        //  add 100 documents with term : aaa
        writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
        for (int i = 0; i < 100; i++)
        {
            addDoc(writer, searchTerm.text());
        }
        writer.close();

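    // testBinaryFields(): store a binary field, read it back eagerly and then lazily
    // through a SetBasedFieldSelector, and verify it again after optimizing the index.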
    public void testBinaryFields() throws IOException
    {
        Directory dir = new RAMDirectory();
        byte[] bin = new byte[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
       
        IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.UNLIMITED);
       
        for (int i = 0; i < 10; i++) {
          addDoc(writer, "document number " + (i + 1));
          addDocumentWithFields(writer);
          addDocumentWithDifferentFields(writer);
          addDocumentWithTermVectorFields(writer);
        }
        writer.close();
        writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
        Document doc = new Document();
        doc.add(new Field("bin1", bin, Field.Store.YES));
        doc.add(new Field("junk", "junk text", Field.Store.NO, Field.Index.ANALYZED));
        writer.addDocument(doc);
        writer.close();
        IndexReader reader = IndexReader.open(dir, false);
        doc = reader.document(reader.maxDoc() - 1);
        Field[] fields = doc.getFields("bin1");
        assertNotNull(fields);
        assertEquals(1, fields.length);
        Field b1 = fields[0];
        assertTrue(b1.isBinary());
        byte[] data1 = b1.getBinaryValue();
        assertEquals(bin.length, b1.getBinaryLength());
        for (int i = 0; i < bin.length; i++) {
          assertEquals(bin[i], data1[i + b1.getBinaryOffset()]);
        }
        Set<String> lazyFields = new HashSet<String>();
        lazyFields.add("bin1");
        FieldSelector sel = new SetBasedFieldSelector(new HashSet<String>(), lazyFields);
        doc = reader.document(reader.maxDoc() - 1, sel);
        Fieldable[] fieldables = doc.getFieldables("bin1");
        assertNotNull(fieldables);
        assertEquals(1, fieldables.length);
        Fieldable fb1 = fieldables[0];
        assertTrue(fb1.isBinary());
        assertEquals(bin.length, fb1.getBinaryLength());
        data1 = fb1.getBinaryValue();
        assertEquals(bin.length, fb1.getBinaryLength());
        for (int i = 0; i < bin.length; i++) {
          assertEquals(bin[i], data1[i + fb1.getBinaryOffset()]);
        }
        reader.close();
        // force optimize


        writer = new IndexWriter(dir, new WhitespaceAnalyzer(), false, IndexWriter.MaxFieldLength.LIMITED);
        writer.optimize();
        writer.close();
        reader = IndexReader.open(dir, false);
        doc = reader.document(reader.maxDoc() - 1);
        fields = doc.getFields("bin1");
