Package org.apache.lucene.facet.taxonomy.directory

Examples of org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter
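
The fragments below are excerpts from Lucene's facet test suite and use the Lucene 4.x facet API, in which categories are indexed through FacetFields and CategoryPath and stored in a sidecar taxonomy index managed by DirectoryTaxonomyWriter. As a minimal, self-contained sketch of the basic pattern the tests build on, the following indexes a single document with one facet; the RAMDirectory, WhitespaceAnalyzer and LUCENE_46 choices are illustrative and not taken from the fragments:

    // Minimal sketch (not from the fragments below): index one document with one
    // facet. Any Directory, Analyzer and 4.x Version constant works the same way.
    Directory indexDir = new RAMDirectory();
    Directory taxoDir = new RAMDirectory();
    IndexWriter indexWriter = new IndexWriter(indexDir,
        new IndexWriterConfig(Version.LUCENE_46, new WhitespaceAnalyzer(Version.LUCENE_46)));
    DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);

    FacetFields facetFields = new FacetFields(taxoWriter); // default FacetIndexingParams
    Document doc = new Document();
    facetFields.addFields(doc, Collections.singletonList(new CategoryPath("Author", "Lisa")));
    indexWriter.addDocument(doc);

    // close both writers; the taxonomy lives in its own Directory but is always
    // committed and closed alongside the main index
    taxoWriter.close();
    indexWriter.close();

Each fragment below is truncated at the point where the original page linked to the full source file.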


    Directory dir1 = newDirectory();
    Directory taxDir1 = newDirectory();
    buildIndexWithFacets(dir1, taxDir1, false, fip);
   
    IndexWriter destIndexWriter = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
    DirectoryTaxonomyWriter destTaxWriter = new DirectoryTaxonomyWriter(taxDir1);
    try {
      TaxonomyMergeUtils.merge(dir, taxDir, new MemoryOrdinalMap(), destIndexWriter, destTaxWriter, fip);
    } finally {
      IOUtils.close(destIndexWriter, destTaxWriter);
    }
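
The source pair (dir, taxDir) that TaxonomyMergeUtils.merge() reads from above is built earlier in the same test, outside this fragment, presumably mirroring the visible destination setup along these lines:

    // Presumed earlier setup (not visible in the fragment above): build the source
    // index and taxonomy that merge() copies into dir1/taxDir1. The 'true' ordering
    // flag is an assumption; only the destination call is shown above.
    Directory dir = newDirectory();
    Directory taxDir = newDirectory();
    buildIndexWithFacets(dir, taxDir, true, fip);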


  private void buildIndexWithFacets(Directory dir, Directory taxDir, boolean asc, FacetIndexingParams fip) throws IOException {
    IndexWriterConfig config = newIndexWriterConfig(TEST_VERSION_CURRENT,
        new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false));
    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config);
   
    DirectoryTaxonomyWriter taxonomyWriter = new DirectoryTaxonomyWriter(taxDir);
    for (int i = 1; i <= NUM_DOCS; i++) {
      Document doc = new Document();
      List<CategoryPath> categoryPaths = new ArrayList<CategoryPath>(i + 1);
      for (int j = i; j <= NUM_DOCS; j++) {
        int facetValue = asc ? j : NUM_DOCS - j;
        categoryPaths.add(new CategoryPath("tag", Integer.toString(facetValue)));
      }
      FacetFields facetFields = new FacetFields(taxonomyWriter, fip);
      facetFields.addFields(doc, categoryPaths);
      writer.addDocument(doc);
    }
    taxonomyWriter.close();
    writer.close();
  }

    initCache();

    Directory indexDir = newDirectory();
    Directory taxoDir = newDirectory();
    IndexWriter indexWriter = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, null));
    TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
   
    FacetIndexingParams iParams = new FacetIndexingParams() {
      @Override
      public int getPartitionSize() {
        return partitionSize;

    Random random = random();
    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random));
    conf.setMaxBufferedDocs(2); // flush often so we end up with several small segments
    conf.setMergePolicy(NoMergePolicy.COMPOUND_FILES); // disable merges so those segments are preserved
    IndexWriter indexWriter = new IndexWriter(indexDir, conf);
    TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
   
    FacetFields facetFields = new PayloadFacetFields(taxoWriter, fip);
   
    HashMap<String,Integer> expectedCounts = new HashMap<String,Integer>(DIMENSIONS.length);
    int numDocs = atLeast(10);
    for (int i = 0; i < numDocs; i++) {
      Document doc = new Document();
      int numCategories = random.nextInt(3) + 1;
      ArrayList<CategoryPath> categories = new ArrayList<CategoryPath>(numCategories);
      HashSet<String> docDimensions = new HashSet<String>();
      while (numCategories-- > 0) {
        String dim = DIMENSIONS[random.nextInt(DIMENSIONS.length)];
        // we should only increment the expected count by 1 per document
        docDimensions.add(dim);
        categories.add(new CategoryPath(dim, Integer.toString(i), Integer.toString(numCategories)));
      }
      facetFields.addFields(doc, categories);
      doc.add(new StringField("docid", Integer.toString(i), Store.YES));
      doc.add(new TextField("foo", "content" + i, Store.YES));
      indexWriter.addDocument(doc);

      // update expected count per dimension
      for (String dim : docDimensions) {
        Integer val = expectedCounts.get(dim);
        if (val == null) {
          expectedCounts.put(dim, Integer.valueOf(1));
        } else {
          expectedCounts.put(dim, Integer.valueOf(val.intValue() + 1));
        }
      }
     
      if (random.nextDouble() < 0.2) { // add some documents that will be deleted
        doc = new Document();
        doc.add(new StringField("del", "key", Store.NO));
        facetFields.addFields(doc, Collections.singletonList(new CategoryPath("dummy")));
        indexWriter.addDocument(doc);
      }
    }
   
    indexWriter.commit();
    taxoWriter.commit();

    // delete the docs that were marked for deletion. note that the 'dummy'
    // category is not removed from the taxonomy, so we must account for it when
    // we verify the migrated index.
    indexWriter.deleteDocuments(new Term("del", "key"));

   
    // index some documents
    Directory indexDir = newDirectory();
    Directory taxoDir = newDirectory();
    IndexWriter indexWriter = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, null).setMaxBufferedDocs(2));
    TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
    FacetFields facetFields = new FacetFields(taxoWriter, indexingParams);
    int ndocs = atLeast(random, 10);
    for (int i = 0; i < ndocs; i++) {
      Document doc = new Document();
      int numCategories = random.nextInt(numDimensions) + 1;

  }
 
  private void initIndex(Directory indexDir, Directory taxoDir) throws IOException {
    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
    IndexWriter indexWriter = new IndexWriter(indexDir, conf);
    DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
    FacetFields facetFields = new FacetFields(taxoWriter);
    indexWriter.addDocument(newDocument(facetFields, "Date/2010/March/12", "A/1"));
    indexWriter.addDocument(newDocument(facetFields, "Date/2010/March/23", "A/2"));
    indexWriter.addDocument(newDocument(facetFields, "Date/2010/April/17", "A/3"));
    indexWriter.addDocument(newDocument(facetFields, "Date/2010/May/18", "A/1"));
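
The newDocument helper used above falls outside this fragment. A plausible sketch, assuming CategoryPath's (String, char) constructor is used to split slash-delimited paths such as "Date/2010/March/12" (hypothetical code, not the test's actual helper):

  // Hypothetical helper: convert slash-delimited facet paths into a Document
  // carrying the corresponding category fields.
  private Document newDocument(FacetFields facetFields, String... paths) throws IOException {
    Document doc = new Document();
    List<CategoryPath> categories = new ArrayList<CategoryPath>(paths.length);
    for (String path : paths) {
      categories.add(new CategoryPath(path, '/'));
    }
    facetFields.addFields(doc, categories);
    return doc;
  }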

public class TestSearcherTaxonomyManager extends FacetTestCase {
  public void test() throws Exception {
    Directory dir = newDirectory();
    Directory taxoDir = newDirectory();
    final IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
    final DirectoryTaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir);
    final FacetFields facetFields = new FacetFields(tw);
    final AtomicBoolean stop = new AtomicBoolean();

    // How many unique facets to index before stopping:
    final int ordLimit = TEST_NIGHTLY ? 100000 : 6000;

    Thread indexer = new Thread() {
        @Override
        public void run() {
          try {
            Set<String> seen = new HashSet<String>();
            List<String> paths = new ArrayList<String>();
            while (true) {
              Document doc = new Document();
              List<CategoryPath> docPaths = new ArrayList<CategoryPath>();
              int numPaths = _TestUtil.nextInt(random(), 1, 5);
              for (int i = 0; i < numPaths; i++) {
                String path;
                if (!paths.isEmpty() && random().nextInt(5) != 4) {
                  // Use previous path
                  path = paths.get(random().nextInt(paths.size()));
                } else {
                  // Create new path
                  path = null;
                  while (true) {
                    path = _TestUtil.randomRealisticUnicodeString(random());
                    if (path.length() != 0 && !seen.contains(path) && path.indexOf(FacetIndexingParams.DEFAULT_FACET_DELIM_CHAR) == -1) {
                      seen.add(path);
                      paths.add(path);
                      break;
                    }
                  }
                }
                docPaths.add(new CategoryPath("field", path));
              }
              try {
                facetFields.addFields(doc, docPaths);
                w.addDocument(doc);
              } catch (IOException ioe) {
                throw new RuntimeException(ioe);
              }

              if (tw.getSize() >= ordLimit) {
                break;
              }
            }
          } finally {
            stop.set(true);

  public void testReplaceTaxonomy() throws Exception {
    Directory dir = newDirectory();
    Directory taxoDir = newDirectory();
    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));
    DirectoryTaxonomyWriter tw = new DirectoryTaxonomyWriter(taxoDir);

    Directory taxoDir2 = newDirectory();
    DirectoryTaxonomyWriter tw2 = new DirectoryTaxonomyWriter(taxoDir2);
    tw2.close();

    SearcherTaxonomyManager mgr = new SearcherTaxonomyManager(w, true, null, tw);
    w.addDocument(new Document());
    tw.replaceTaxonomy(taxoDir2);
    taxoDir2.close();
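
For context, the search side of the SearcherTaxonomyManager created above (mgr) follows the usual acquire/release pattern; a minimal sketch, assuming the same Lucene 4.x API:

    // Acquire the paired searcher and taxonomy reader, use them, then release.
    // maybeRefresh() (typically run from a separate refresh thread) publishes new
    // index and taxonomy commits as a single consistent pair.
    SearcherTaxonomyManager.SearcherAndTaxonomy pair = mgr.acquire();
    try {
      IndexSearcher searcher = pair.searcher;
      TaxonomyReader taxoReader = pair.taxonomyReader;
      // ... run queries and facet accumulation against searcher and taxoReader ...
    } finally {
      mgr.release(pair);
    }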

    // 4. Segment w/ categories, but only some results
   
    IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()));
    conf.setMergePolicy(NoMergePolicy.COMPOUND_FILES); // prevent merges, so we can control the index segments
    IndexWriter indexWriter = new IndexWriter(indexDir, conf);
    TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);

    Map<String,OrdinalPolicy> policies = new HashMap<String,OrdinalPolicy>();
    policies.put(CP_B.components[0], OrdinalPolicy.ALL_PARENTS);
    policies.put(CP_C.components[0], OrdinalPolicy.NO_PARENTS);
    policies.put(CP_D.components[0], OrdinalPolicy.NO_PARENTS);
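
The fragment is cut off here; judging from the visible setup, the policies map is presumably wrapped into per-dimension category list params before indexing, roughly as follows (an assumption about how the test continues, using the Lucene 4.x PerDimensionOrdinalPolicy and FacetIndexingParams constructors):

    // Presumed continuation (not visible in the fragment): apply the per-dimension
    // ordinal policies when the facets are indexed.
    CategoryListParams clp = new PerDimensionOrdinalPolicy(policies);
    FacetIndexingParams fip = new FacetIndexingParams(clp);
    FacetFields facetFields = new FacetFields(taxoWriter, fip);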

      @Override
      public CategoryListIterator createCategoryListIterator(int partition) throws IOException {
        return new AssertingCategoryListIterator(super.createCategoryListIterator(partition));
      }
    });
    TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
    FacetFields facetFields = new FacetFields(taxoWriter, fip);
    indexTwoDocs(indexWriter, facetFields, false); // 1st segment, no content, with categories
    indexTwoDocs(indexWriter, null, true);         // 2nd segment, with content, no categories
    indexTwoDocs(indexWriter, facetFields, true);  // 3rd segment, with content and categories
    indexTwoDocs(indexWriter, null, false);        // 4th segment, no content, no categories
