Package org.apache.lucene.search

Examples of org.apache.lucene.search.MultiPhraseQuery
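
MultiPhraseQuery generalizes PhraseQuery by allowing several alternative terms at a single phrase position. Before the project snippets below, here is a minimal sketch of the mutable pre-6.0 API that these examples rely on (not taken from any of the projects; the field name "body" and the terms are illustrative):

    // assumes: import org.apache.lucene.index.Term;
    //          import org.apache.lucene.search.MultiPhraseQuery;
    MultiPhraseQuery mpq = new MultiPhraseQuery();
    mpq.setSlop(0);                                  // require adjacent positions
    mpq.add(new Term("body", "apache"));             // exactly one term at position 0
    mpq.add(new Term[] {                             // either alternative at position 1
        new Term("body", "lucene"),
        new Term("body", "solr")
    });
    // matches "apache lucene" or "apache solr"; run it like any other Query,
    // e.g. indexSearcher.search(mpq, 10)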


            } else {
                return new TermQuery(newFulltextTerm(token, fieldName));
            }
        } else {
            if (hasFulltextToken(tokens)) {
                MultiPhraseQuery mpq = new MultiPhraseQuery();
                for(String token: tokens){
                    if (hasFulltextToken(token)) {
                        Term[] terms = extractMatchingTokens(reader, fieldName, token);
                        if (terms != null && terms.length > 0) {
                            mpq.add(terms);
                        }
                    } else {
                        mpq.add(newFulltextTerm(token, fieldName));
                    }
                }
                return mpq;
            } else {
                PhraseQuery pq = new PhraseQuery();
View Full Code Here


        perFieldQuery = query;
      }
    }
    else {
      if (isMultiPhrase) {
        MultiPhraseQuery query = new MultiPhraseQuery();
        query.setSlop( phraseContext.getSlop() );
        for ( Map.Entry<Integer,List<Term>> entry : termsPerPosition.entrySet() ) {
          final List<Term> value = entry.getValue();
          query.add( value.toArray( new Term[value.size()] ), entry.getKey() );
        }
        perFieldQuery = query;
      }
      else {
        PhraseQuery query = new PhraseQuery();
        query.setSlop( phraseContext.getSlop() );
        for ( Map.Entry<Integer,List<Term>> entry : termsPerPosition.entrySet() ) {
          final List<Term> value = entry.getValue();
          query.add( value.get(0), entry.getKey() );
        }
        perFieldQuery = query;
      }
    }
    return fieldContext.getFieldCustomizer().setWrappedQuery( perFieldQuery ).createQuery();
View Full Code Here

          }
          return q;
        }
        else {
          // phrase query:
          MultiPhraseQuery mpq = newMultiPhraseQuery();
          mpq.setSlop(phraseSlop);
          List<Term> multiTerms = new ArrayList<Term>();
          int position = -1;
          for (int i = 0; i < numTokens; i++) {
            String term = null;
            int positionIncrement = 1;
            try {
              boolean hasNext = buffer.incrementToken();
              assert hasNext == true;
              term = termAtt.toString();
              if (posIncrAtt != null) {
                positionIncrement = posIncrAtt.getPositionIncrement();
              }
            } catch (IOException e) {
              // safe to ignore, because we know the number of tokens
            }

            if (positionIncrement > 0 && multiTerms.size() > 0) {
              if (enablePositionIncrements) {
                mpq.add(multiTerms.toArray(new Term[0]),position);
              } else {
                mpq.add(multiTerms.toArray(new Term[0]));
              }
              multiTerms.clear();
            }
            position += positionIncrement;
            multiTerms.add(new Term(field, term));
          }
          if (enablePositionIncrements) {
            mpq.add(multiTerms.toArray(new Term[0]),position);
          } else {
            mpq.add(multiTerms.toArray(new Term[0]));
          }
          return mpq;
        }
      }
      else {
View Full Code Here
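
With position increments enabled, the loop above stacks any token whose increment is zero onto the same phrase position, which is how analyzer-generated synonyms end up as alternatives inside one MultiPhraseQuery slot. For a token stream of "quick", "fast" (a synonym emitted with increment 0) and "fox", the loop effectively builds the following (a hand-written sketch using the same pre-6.0 API; field name and terms are made up):

    MultiPhraseQuery mpq = new MultiPhraseQuery();
    // "quick" and its zero-increment synonym "fast" share position 0
    mpq.add(new Term[] { new Term("body", "quick"), new Term("body", "fast") }, 0);
    // "fox" advances to position 1
    mpq.add(new Term[] { new Term("body", "fox") }, 1);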

  /**
   * Builds a new MultiPhraseQuery instance
   * @return new MultiPhraseQuery instance
   */
  protected MultiPhraseQuery newMultiPhraseQuery(){
    return new MultiPhraseQuery();
  }
View Full Code Here
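
The protected factory above is the parser's extension hook: a subclass can override it to hand back a customized MultiPhraseQuery for every multi-term phrase the parser builds. A minimal sketch, assuming the factory sits on the classic QueryParser as in Lucene's own 3.x parser; the subclass name is made up, and since the parser still calls setSlop() on the returned query afterwards, the boost here is purely illustrative:

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.queryParser.QueryParser;  // org.apache.lucene.queryparser.classic.QueryParser in 4.x
    import org.apache.lucene.search.MultiPhraseQuery;
    import org.apache.lucene.util.Version;

    public class DownWeightedPhraseParser extends QueryParser {
        public DownWeightedPhraseParser(Version matchVersion, String field, Analyzer analyzer) {
            super(matchVersion, field, analyzer);
        }

        @Override
        protected MultiPhraseQuery newMultiPhraseQuery() {
            MultiPhraseQuery mpq = new MultiPhraseQuery();
            mpq.setBoost(0.5f);  // illustrative: down-weight multi-term phrase matches
            return mpq;
        }
    }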

        perFieldQuery = query;
      }
    }
    else {
      if ( isMultiPhrase ) {
        MultiPhraseQuery query = new MultiPhraseQuery();
        query.setSlop( phraseContext.getSlop() );
        for ( Map.Entry<Integer,List<Term>> entry : termsPerPosition.entrySet() ) {
          final List<Term> value = entry.getValue();
          query.add( value.toArray( new Term[value.size()] ), entry.getKey() );
        }
        perFieldQuery = query;
      }
      else {
        PhraseQuery query = new PhraseQuery();
        query.setSlop( phraseContext.getSlop() );
        for ( Map.Entry<Integer,List<Term>> entry : termsPerPosition.entrySet() ) {
          final List<Term> value = entry.getValue();
          query.add( value.get( 0 ), entry.getKey() );
        }
        perFieldQuery = query;
      }
    }
    return fieldContext.getFieldCustomizer().setWrappedQuery( perFieldQuery ).createQuery();
View Full Code Here

                        iterationSlug));
        TermQuery localeQuery =
                new TermQuery(new Term(IndexFieldLabels.LOCALE_ID_FIELD,
                        localeId.getId()));

        MultiPhraseQuery documentsQuery = new MultiPhraseQuery();
        if (documentPaths != null && !documentPaths.isEmpty()) {
            ArrayList<Term> docPathTerms = new ArrayList<Term>();
            for (String s : documentPaths) {
                docPathTerms
                        .add(new Term(IndexFieldLabels.DOCUMENT_ID_FIELD, s));
            }
            documentsQuery.add(docPathTerms.toArray(new Term[docPathTerms
                    .size()]));
        }

        List<HTextFlow> resultList = new ArrayList<HTextFlow>();
        if (constraints.isSearchInTarget()) {
            // Content query for target
            String targetAnalyzerName =
                    TextContainerAnalyzerDiscriminator
                            .getAnalyzerDefinitionName(localeId.getId());
            Analyzer targetAnalyzer =
                    entityManager.getSearchFactory().getAnalyzer(
                            targetAnalyzerName);

            Query tgtContentPhraseQuery;
            QueryParser contentQueryParser =
                    new MultiFieldQueryParser(Version.LUCENE_29,
                            IndexFieldLabels.CONTENT_FIELDS, targetAnalyzer);
            try {
                tgtContentPhraseQuery =
                        contentQueryParser.parse("\""
                                + QueryParser.escape(constraints
                                        .getSearchString()) + "\"");
            } catch (ParseException e) {
                throw new ZanataServiceException("Failed to parse query", e);
            }

            // Target Query
            BooleanQuery targetQuery = new BooleanQuery();
            targetQuery.add(projectQuery, Occur.MUST);
            targetQuery.add(iterationQuery, Occur.MUST);
            targetQuery.add(tgtContentPhraseQuery, Occur.MUST);
            if (documentsQuery.getTermArrays().size() > 0) {
                targetQuery.add(documentsQuery, Occur.MUST);
            }
            targetQuery.add(localeQuery, Occur.MUST);

            if (!constraints.getIncludedStates().hasTranslated()) {
                TermQuery approvedStateQuery =
                        new TermQuery(new Term(
                                IndexFieldLabels.CONTENT_STATE_FIELD,
                                ContentState.Approved.toString()));
                targetQuery.add(approvedStateQuery, Occur.MUST_NOT);
            }

            if (!constraints.getIncludedStates().hasFuzzy()) {
                TermQuery approvedStateQuery =
                        new TermQuery(new Term(
                                IndexFieldLabels.CONTENT_STATE_FIELD,
                                ContentState.NeedReview.toString()));
                targetQuery.add(approvedStateQuery, Occur.MUST_NOT);
            }

            if (!constraints.getIncludedStates().hasNew()) {
                TermQuery approvedStateQuery =
                        new TermQuery(new Term(
                                IndexFieldLabels.CONTENT_STATE_FIELD,
                                ContentState.New.toString()));
                targetQuery.add(approvedStateQuery, Occur.MUST_NOT);
            }

            FullTextQuery ftQuery =
                    entityManager.createFullTextQuery(targetQuery,
                            HTextFlowTarget.class);
            @SuppressWarnings("unchecked")
            List<HTextFlowTarget> matchedTargets =
                    (List<HTextFlowTarget>) ftQuery.getResultList();
            log.info("got {} HTextFlowTarget results", matchedTargets.size());
            for (HTextFlowTarget htft : matchedTargets) {
                // manually check for case sensitive matches
                if (!constraints.isCaseSensitive()
                        || (contentIsValid(htft.getContents(), constraints))) {
                    if (!htft.getTextFlow().getDocument().isObsolete()) {
                        resultList.add(htft.getTextFlow());
                    }
                }
            }
        }

        if (constraints.isSearchInSource()) {
            // Source locale
            // NB: Assume the first document's locale, or the same target locale
            // if there are no documents
            // TODO Move source locale to the Project iteration level
            LocaleId sourceLocaleId = localeId;
            HProjectIteration projectIteration =
                    projectIterationDAO.getBySlug(projectSlug, iterationSlug);
            if (!projectIteration.getDocuments().isEmpty()) {
                sourceLocaleId =
                        projectIteration.getDocuments().values().iterator()
                                .next().getLocale().getLocaleId();
            }

            // Content query for source
            String sourceAnalyzerName =
                    TextContainerAnalyzerDiscriminator
                            .getAnalyzerDefinitionName(sourceLocaleId.getId());
            Analyzer sourceAnalyzer =
                    entityManager.getSearchFactory().getAnalyzer(
                            sourceAnalyzerName);

            Query srcContentPhraseQuery;
            QueryParser srcContentQueryParser =
                    new MultiFieldQueryParser(Version.LUCENE_29,
                            IndexFieldLabels.CONTENT_FIELDS, sourceAnalyzer);
            try {
                srcContentPhraseQuery =
                        srcContentQueryParser.parse("\""
                                + QueryParser.escape(constraints
                                        .getSearchString()) + "\"");
            } catch (ParseException e) {
                throw new ZanataServiceException("Failed to parse query", e);
            }

            // Source Query
            BooleanQuery sourceQuery = new BooleanQuery();
            sourceQuery.add(projectQuery, Occur.MUST);
            sourceQuery.add(iterationQuery, Occur.MUST);
            sourceQuery.add(srcContentPhraseQuery, Occur.MUST);
            if (documentsQuery.getTermArrays().size() > 0) {
                sourceQuery.add(documentsQuery, Occur.MUST);
            }

            FullTextQuery ftQuery =
                    entityManager.createFullTextQuery(sourceQuery,
View Full Code Here

        this.terms.get(this.terms.size() - 1), reader);
    if (prefixTerms == null) {
      return new MatchNoDocsQuery();
    }

    MultiPhraseQuery query = new MultiPhraseQuery();
    for (int i = 0; i < this.terms.size() - 1; i++) {
      query.add(new Term(this.field, this.terms.get(i)));
    }
    query.add(prefixTerms);
    return query;
  }
View Full Code Here
