Package org.hibernate.search.engine

Examples of org.hibernate.search.engine.SearchFactoryImplementor


  public void purge(Class<?> entityType, Serializable id) {  // signature restored from context
    if ( entityType == null ) {
      return;
    }

    // accessing the document builder is not strictly necessary, but it is a small optimization
    // and it lets us verify that the client passed an @Indexed entity type
    SearchFactoryImplementor searchFactoryImplementor = getSearchFactoryImplementor();
    DocumentBuilder builder = searchFactoryImplementor.getDocumentBuilder( entityType );
    if ( builder == null ) {
      String msg = "Entity to index is not an @Indexed entity: " + entityType.getName();
      throw new IllegalArgumentException( msg );
    }

    Work work;
    if ( id == null ) {
      // purge the main entity
      work = new Work( entityType, id, WorkType.PURGE_ALL );
      searchFactoryImplementor.getWorker().performWork( work, transactionContext );

      // purge the subclasses
      Set<Class<?>> subClasses = builder.getMappedSubclasses();
      for ( Class<?> clazz : subClasses ) {
        work = new Work( clazz, id, WorkType.PURGE_ALL );
        searchFactoryImplementor.getWorker().performWork( work, transactionContext );
      }
    }
    else {
      work = new Work( entityType, id, WorkType.PURGE );
      searchFactoryImplementor.getWorker().performWork( work, transactionContext );
    }
  }
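A minimal usage sketch for the purge methods above, assuming a hypothetical @Indexed Book entity and an open Session; Search.getFullTextSession is the standard way to obtain the FullTextSession, and the purge work is executed within the current transaction:

    FullTextSession fullTextSession = Search.getFullTextSession( session );
    // remove a single Book document from the index; the database row is untouched
    fullTextSession.purge( Book.class, bookId );
    // remove every Book document, including documents for mapped subclasses
    fullTextSession.purgeAll( Book.class );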


  public void index(Object entity) {  // signature restored from context
    if ( entity == null ) {
      throw new IllegalArgumentException( "Entity to index should not be null" );
    }

    Class<?> clazz = Hibernate.getClass( entity );
    //TODO cache that at the FTSession level
    SearchFactoryImplementor searchFactoryImplementor = getSearchFactoryImplementor();
    //not strictly necessary but a small optimization
    if ( searchFactoryImplementor.getDocumentBuilder( clazz ) == null ) {
      String msg = "Entity to index is not an @Indexed entity: " + clazz.getName();  // report the unproxied class
      throw new IllegalArgumentException( msg );
    }
    Serializable id = session.getIdentifier( entity );
    Work work = new Work( entity, id, WorkType.INDEX );
    searchFactoryImplementor.getWorker().performWork( work, transactionContext );

    //TODO
    //need to add elements in a queue kept at the Session level
    //the queue will be processed by a Lucene(Auto)FlushEventListener
    //note that we could keep this queue somewhere in the event listener in the meantime, but that requires
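A usage sketch for index(), again with a hypothetical @Indexed Book entity; the INDEX work unit is queued and applied when the transaction completes:

    FullTextSession fullTextSession = Search.getFullTextSession( session );
    Transaction tx = fullTextSession.beginTransaction();
    Book book = (Book) fullTextSession.get( Book.class, bookId );
    fullTextSession.index( book );  // queues an INDEX work unit for this entity
    tx.commit();                    // queued work is applied here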

  public Iterator iterate() throws HibernateException {  // signature restored from context
    //implement an iterator which keeps the id/class for each hit and gets the object on demand,
    //because we cannot keep the searcher (and hence the hits) open: there is no hook to know when
    //the user stops using the iterator
    //scroll() is better in this area

    SearchFactoryImplementor searchFactoryImplementor = ContextHelper.getSearchFactoryBySFI( session );
    //find the directories
    IndexSearcher searcher = buildSearcher( searchFactoryImplementor );
    if ( searcher == null ) {
      return new IteratorImpl( Collections.EMPTY_LIST, noLoader );
    }
    try {
      QueryAndHits queryAndHits = getQueryAndHits( searcher );
      int first = first();
      int max = max( first, queryAndHits.hits );
      Session sess = (Session) this.session;

      int size = max - first + 1 < 0 ? 0 : max - first + 1;
      List<EntityInfo> infos = new ArrayList<EntityInfo>( size );
      DocumentExtractor extractor = new DocumentExtractor( queryAndHits.preparedQuery, searcher, searchFactoryImplementor, indexProjection );
      for (int index = first; index <= max; index++) {
        //TODO use indexSearcher.getIndexReader().document( hits.id(index), FieldSelector(indexProjection) );
        infos.add( extractor.extract( queryAndHits.hits, index ) );
      }
      Loader loader = getLoader( sess, searchFactoryImplementor );
      return new IteratorImpl( infos, loader );
    }
    catch (IOException e) {
      throw new HibernateException( "Unable to query Lucene index", e );
    }
    finally {
      try {
        closeSearcher( searcher, searchFactoryImplementor.getReaderProvider() );
      }
      catch (SearchException e) {
        log.warn( "Unable to properly close searcher during lucene query: " + getQueryString(), e );
      }
    }
  }
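A sketch of driving iterate(), assuming a Lucene QueryParser built with the same analyzer used at indexing time (parser and entity names are illustrative):

    org.apache.lucene.search.Query luceneQuery = parser.parse( "summary:hibernate" );
    FullTextQuery query = fullTextSession.createFullTextQuery( luceneQuery, Book.class );
    Iterator results = query.iterate();  // each entity is initialized lazily, on next()
    while ( results.hasNext() ) {
      Book book = (Book) results.next();
      // process book
    }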


  public ScrollableResults scroll() throws HibernateException {
    //keep the searcher open until the resultset is closed
    SearchFactoryImplementor searchFactory = ContextHelper.getSearchFactoryBySFI( session );

    //find the directories
    IndexSearcher searcher = buildSearcher( searchFactory );
    //FIXME: handle null searcher
    try {
      QueryAndHits queryAndHits = getQueryAndHits( searcher );
      int first = first();
      int max = max( first, queryAndHits.hits );
      DocumentExtractor extractor = new DocumentExtractor( queryAndHits.preparedQuery, searcher, searchFactory, indexProjection );
      Loader loader = getLoader( (Session) this.session, searchFactory );
      return new ScrollableResultsImpl( searcher, queryAndHits.hits, first, max, fetchSize, extractor, loader, searchFactory );
    }
    catch (IOException e) {
      //close only in case of exception
      try {
        closeSearcher( searcher, searchFactory.getReaderProvider() );
      }
      catch (SearchException ee) {
        //we have the initial issue already
      }
      throw new HibernateException( "Unable to query Lucene index", e );
    }
  }
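Since scroll() keeps the searcher open until the result set is closed, callers must close the ScrollableResults themselves; a sketch:

    ScrollableResults scrollableResults = query.scroll();
    try {
      while ( scrollableResults.next() ) {
        Book book = (Book) scrollableResults.get( 0 );
        // process book
      }
    }
    finally {
      scrollableResults.close();  // releases the underlying searcher/reader
    }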

  public ScrollableResults scroll(ScrollMode scrollMode) throws HibernateException {  // signature restored from context
    //TODO think about this scroll mode
    return scroll();
  }

  public List list() throws HibernateException {
    SearchFactoryImplementor searchFactoryImplementor = ContextHelper.getSearchFactoryBySFI( session );
    //find the directories
    IndexSearcher searcher = buildSearcher( searchFactoryImplementor );
    if ( searcher == null ) return Collections.EMPTY_LIST;
    try {
      QueryAndHits queryAndHits = getQueryAndHits( searcher );
      int first = first();
      int max = max( first, queryAndHits.hits );
      Session sess = (Session) this.session;

      int size = max - first + 1 < 0 ? 0 : max - first + 1;
      List<EntityInfo> infos = new ArrayList<EntityInfo>( size );
      DocumentExtractor extractor = new DocumentExtractor( queryAndHits.preparedQuery, searcher, searchFactoryImplementor, indexProjection );
      for (int index = first; index <= max; index++) {
        infos.add( extractor.extract( queryAndHits.hits, index ) );
      }
      Loader loader = getLoader( sess, searchFactoryImplementor );
      List list = loader.load( infos.toArray( new EntityInfo[infos.size()] ) );
      if ( resultTransformer == null || loader instanceof ProjectionLoader ) {
        //stay consistent with transformTuple which can only be executed during a projection
        return list;
      }
      else {
        return resultTransformer.transformList( list );
      }
    }
    catch (IOException e) {
      throw new HibernateException( "Unable to query Lucene index", e );
    }
    finally {
      try {
        closeSearcher( searcher, searchFactoryImplementor.getReaderProvider() );
      }
      catch (SearchException e) {
        log.warn( "Unable to properly close searcher during lucene query: " + getQueryString(), e );
      }
    }
  }
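A sketch of list() with pagination, using a luceneQuery built as above; setFirstResult/setMaxResults drive the first()/max() window, and a ResultTransformer is only applied for projection queries, matching the ProjectionLoader check:

    FullTextQuery query = fullTextSession.createFullTextQuery( luceneQuery, Book.class );
    query.setFirstResult( 20 );  // maps to first()
    query.setMaxResults( 10 );   // bounds max()
    List books = query.list();   // entities are batch-loaded via the Loader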


  public Explanation explain(int documentId) {
    Explanation explanation = null;
    SearchFactoryImplementor searchFactoryImplementor = getSearchFactoryImplementor();
    Searcher searcher = buildSearcher( searchFactoryImplementor );
    if ( searcher == null ) {
      throw new SearchException( "Unable to build explanation for document id: "
          + documentId + ". No index found" );
    }
    try {
      org.apache.lucene.search.Query query = filterQueryByClasses( luceneQuery );
      buildFilters();
      explanation = searcher.explain( query, documentId );
    }
    catch (IOException e) {
      throw new HibernateException( "Unable to query Lucene index and build explanation", e );
    }
    finally {
      //searcher cannot be null
      try {
        closeSearcher( searcher, searchFactoryImplementor.getReaderProvider() );
      }
      catch (SearchException e) {
        log.warn( "Unable to properly close searcher during lucene query: " + getQueryString(), e );
      }
    }
    return explanation;
  }
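explain() expects a Lucene document id, which is typically retrieved through the DOCUMENT_ID projection; a sketch (entity and variable names illustrative):

    fullTextQuery.setProjection( FullTextQuery.DOCUMENT_ID, FullTextQuery.THIS );
    Object[] firstRow = (Object[]) fullTextQuery.list().get( 0 );
    int docId = (Integer) firstRow[0];
    Explanation explanation = fullTextQuery.explain( docId );
    System.out.println( explanation.toString() );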

  /**
   * Builds the Lucene filter for the given full-text filter.
   *
   * @param fullTextFilter the filter holding the definition name and parameters, used to create the
   * Lucene <code>Filter</code>.
   * @return the Lucene filter mapped to the filter definition
   */
  private Filter buildLuceneFilter(FullTextFilterImpl fullTextFilter) {

    SearchFactoryImplementor searchFactoryImplementor = getSearchFactoryImplementor();

    /*
     * FilterKey implementations and Filter (factory) instances do not have to be thread-safe with respect
     * to their parameter injection, because FilterCachingStrategy ensures a memory barrier between
     * concurrent thread calls.
     */
    FilterDef def = searchFactoryImplementor.getFilterDefinition( fullTextFilter.getName() );
    Object instance = createFilterInstance(fullTextFilter, def);
    FilterKey key = createFilterKey(def, instance);

    // try to get the filter out of the cache
    Filter filter = cacheInstance( def.getCacheMode() ) ?
        searchFactoryImplementor.getFilterCachingStrategy().getCachedFilter( key ) :
        null;

    if ( filter == null ) {
      filter = createFilter(def, instance);

      // add filter to cache if we have to
      if ( cacheInstance( def.getCacheMode() ) ) {
        searchFactoryImplementor.getFilterCachingStrategy().addCachedFilter( key, filter );
      }
    }
    return filter;
  }
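The filter definitions this method resolves are declared on an entity (or package) with @FullTextFilterDef and activated per query; a sketch, assuming a hypothetical SecurityFilterFactory exposing an @Factory method:

    @Entity
    @Indexed
    @FullTextFilterDef( name = "security", impl = SecurityFilterFactory.class,
        cache = FilterCacheModeType.INSTANCE_AND_DOCIDSETRESULTS )
    public class Driver {
      // ...
    }

    // at query time; parameters are injected into the factory before the filter is built
    fullTextQuery.enableFullTextFilter( "security" ).setParameter( "level", 5 );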


  public int getResultSize() {
    if ( resultSize == null ) {
      //get result size without object initialization
      SearchFactoryImplementor searchFactoryImplementor = ContextHelper.getSearchFactoryBySFI( session );
      IndexSearcher searcher = buildSearcher( searchFactoryImplementor );
      if ( searcher == null ) {
        resultSize = 0;
      }
      else {
        Hits hits;
        try {
          hits = getQueryAndHits( searcher ).hits;
          resultSize = hits.length();
        }
        catch (IOException e) {
          throw new HibernateException( "Unable to query Lucene index", e );
        }
        finally {
          //searcher cannot be null
          try {
            closeSearcher( searcher, searchFactoryImplementor.getReaderProvider() );
            //searchFactoryImplementor.getReaderProvider().closeReader( searcher.getIndexReader() );
          }
          catch (SearchException e) {
            log.warn( "Unable to properly close searcher during lucene query: " + getQueryString(), e );
          }
        }
      }
    }
    return resultSize;
  }
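getResultSize() returns the total number of matching documents without initializing any entity, which makes it cheap for pagination; a sketch:

    int total = fullTextQuery.getResultSize();  // total hits, no entities loaded
    List page = fullTextQuery.setFirstResult( 0 ).setMaxResults( 20 ).list();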

  public <T> void purgeAll(Class<T> entityType) {
    purge( entityType, null );
  }

  public void flushToIndexes() {
    SearchFactoryImplementor searchFactoryImplementor = getSearchFactoryImplementor();
    searchFactoryImplementor.getWorker().flushWorks( transactionContext );
  }
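flushToIndexes() is mostly useful when rebuilding an index in batches: pending work is applied mid-transaction so the session can be cleared; a sketch of that pattern (batch size illustrative):

    int batchSize = 100;
    ScrollableResults results = fullTextSession.createCriteria( Book.class )
        .scroll( ScrollMode.FORWARD_ONLY );
    int index = 0;
    while ( results.next() ) {
      index++;
      fullTextSession.index( results.get( 0 ) );  // queue indexing work for this entity
      if ( index % batchSize == 0 ) {
        fullTextSession.flushToIndexes();  // apply queued work to the Lucene index
        fullTextSession.clear();           // detach processed entities to free memory
      }
    }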

  public <T> void index(T entity) {  // signature restored from context
    if ( entity == null ) {
      throw new IllegalArgumentException( "Entity to index should not be null" );
    }

    Class<?> clazz = HibernateHelper.getClass( entity );
    //TODO cache that at the FTSession level
    SearchFactoryImplementor searchFactoryImplementor = getSearchFactoryImplementor();
    //not strictly necessary but a small optimization
    if ( searchFactoryImplementor.getDocumentBuilderIndexedEntity( clazz ) == null ) {
      String msg = "Entity to index is not an @Indexed entity: " + clazz.getName();  // report the unproxied class
      throw new IllegalArgumentException( msg );
    }
    Serializable id = session.getIdentifier( entity );
    Work<T> work = new Work<T>( entity, id, WorkType.INDEX );
    searchFactoryImplementor.getWorker().performWork( work, transactionContext );

    //TODO
    //need to add elements in a queue kept at the Session level
    //the queue will be processed by a Lucene(Auto)FlushEventListener
    //note that we could keep this queue somewhere in the event listener in the meantime, but that requires
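This later variant differs from the earlier index() only in its generic signature and the DocumentBuilderIndexedEntity lookup; caller code is unchanged. The public face of SearchFactoryImplementor is also reachable from the session, for example to trigger an index optimization; a sketch:

    SearchFactory searchFactory = fullTextSession.getSearchFactory();
    searchFactory.optimize( Book.class );  // merge the Lucene segments of the Book index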
