Examples of FieldQueryNode
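The snippets below are collected from the Apache Lucene query parser ("flexible") module and show typical ways a FieldQueryNode is created by the syntax parser, rewritten by query-node processors, and finally turned into a Lucene Query by the builders.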


Examples of org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode
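This fragment, apparently from a MultiPhraseQuery-style builder, walks the children of a query node and groups each child's prebuilt Term by the child FieldQueryNode's position increment, so that terms sharing a position end up in the same list.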

    if (children != null) {
      TreeMap<Integer, List<Term>> positionTermMap = new TreeMap<Integer, List<Term>>();

      for (QueryNode child : children) {
        FieldQueryNode termNode = (FieldQueryNode) child;
        TermQuery termQuery = (TermQuery) termNode
            .getTag(QueryTreeBuilder.QUERY_TREE_BUILDER_TAGID);
        List<Term> termList = positionTermMap.get(termNode
            .getPositionIncrement());

        if (termList == null) {
          termList = new LinkedList<Term>();
          positionTermMap.put(termNode.getPositionIncrement(), termList);

        }

        termList.add(termQuery.getTerm());
        // ... (snippet truncated)

Examples of org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode
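A fragment of the JavaCC-generated Clause() production: after `field:` or `field=` it unescapes the field name and delegates to Term(), while the comparison operators (<, <=, >, >=) are turned into a pair of FieldQueryNode bounds, with "*" standing in for the open end of the range.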

  }

  final public QueryNode Clause(CharSequence field) throws ParseException {
  QueryNode q;
  Token fieldToken=null, boost=null, operator=null, term=null;
  FieldQueryNode qLower, qUpper;
  boolean lowerInclusive, upperInclusive;

  boolean group = false;
    if (jj_2_2(3)) {
      fieldToken = jj_consume_token(TERM);
      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
      case OP_COLON:
      case OP_EQUAL:
        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
        case OP_COLON:
          jj_consume_token(OP_COLON);
          break;
        case OP_EQUAL:
          jj_consume_token(OP_EQUAL);
          break;
        default:
          jj_la1[7] = jj_gen;
          jj_consume_token(-1);
          throw new ParseException();
        }
        field = EscapeQuerySyntaxImpl.discardEscapeChar(fieldToken.image);
        q = Term(field);
        break;
      case OP_LESSTHAN:
      case OP_LESSTHANEQ:
      case OP_MORETHAN:
      case OP_MORETHANEQ:
        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
        case OP_LESSTHAN:
          operator = jj_consume_token(OP_LESSTHAN);
          break;
        case OP_LESSTHANEQ:
          operator = jj_consume_token(OP_LESSTHANEQ);
          break;
        case OP_MORETHAN:
          operator = jj_consume_token(OP_MORETHAN);
          break;
        case OP_MORETHANEQ:
          operator = jj_consume_token(OP_MORETHANEQ);
          break;
        default:
          jj_la1[8] = jj_gen;
          jj_consume_token(-1);
          throw new ParseException();
        }
        field = EscapeQuerySyntaxImpl.discardEscapeChar(fieldToken.image);
        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
        case TERM:
          term = jj_consume_token(TERM);
          break;
        case QUOTED:
          term = jj_consume_token(QUOTED);
          break;
        case NUMBER:
          term = jj_consume_token(NUMBER);
          break;
        default:
          jj_la1[9] = jj_gen;
          jj_consume_token(-1);
          throw new ParseException();
        }
        if (term.kind == QUOTED) {
            term.image = term.image.substring(1, term.image.length()-1);
        }
        switch (operator.kind) {
            case OP_LESSTHAN:
              lowerInclusive = true;
              upperInclusive = false;

                qLower = new FieldQueryNode(field,
                                         "*", term.beginColumn, term.endColumn);
                qUpper = new FieldQueryNode(field,
                                         EscapeQuerySyntaxImpl.discardEscapeChar(term.image), term.beginColumn, term.endColumn);

                break;
            case OP_LESSTHANEQ:
              lowerInclusive = true;
              upperInclusive = true;

                qLower = new FieldQueryNode(field,
                                         "*", term.beginColumn, term.endColumn);
                qUpper = new FieldQueryNode(field,
                                         EscapeQuerySyntaxImpl.discardEscapeChar(term.image), term.beginColumn, term.endColumn);
                break;
            case OP_MORETHAN:
              lowerInclusive = false;
              upperInclusive = true;

                qLower = new FieldQueryNode(field,
                                         EscapeQuerySyntaxImpl.discardEscapeChar(term.image), term.beginColumn, term.endColumn);
                qUpper = new FieldQueryNode(field,
                                         "*", term.beginColumn, term.endColumn);
                break;
            case OP_MORETHANEQ:
              lowerInclusive = true;
              upperInclusive = true;

                qLower = new FieldQueryNode(field,
                                         EscapeQuerySyntaxImpl.discardEscapeChar(term.image), term.beginColumn, term.endColumn);
                qUpper = new FieldQueryNode(field,
                                         "*", term.beginColumn, term.endColumn);
                break;
            default:
                {if (true) throw new Error("Unhandled case: operator="+operator.toString());}
        }
        // ... (snippet truncated)

Examples of org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode
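Another generated-parser fragment, this time from the term/range production: a plain TERM or NUMBER becomes a FieldQueryNode, a trailing fuzzy slop turns it into a FuzzyQueryNode, a REGEXPTERM becomes a RegexpQueryNode, and a bracketed range produces two FieldQueryNode bounds wrapped in a TermRangeQueryNode.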

  boolean fuzzy = false;
  boolean regexp = false;
  boolean startInc=false;
  boolean endInc=false;
  QueryNode q =null;
  FieldQueryNode qLower, qUpper;
  float defaultMinSimilarity = org.apache.lucene.search.FuzzyQuery.defaultMinSimilarity;
    switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
    case TERM:
    case REGEXPTERM:
    case NUMBER:
      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
      case TERM:
        term = jj_consume_token(TERM);
        q = new FieldQueryNode(field, EscapeQuerySyntaxImpl.discardEscapeChar(term.image), term.beginColumn, term.endColumn);
        break;
      case REGEXPTERM:
        term = jj_consume_token(REGEXPTERM);
        regexp = true;
        break;
      case NUMBER:
        term = jj_consume_token(NUMBER);
        break;
      default:
        jj_la1[15] = jj_gen;
        jj_consume_token(-1);
        throw new ParseException();
      }
      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
      case FUZZY_SLOP:
        fuzzySlop = jj_consume_token(FUZZY_SLOP);
        fuzzy = true;
        break;
      default:
        jj_la1[16] = jj_gen;
        ;
      }
      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
      case CARAT:
        jj_consume_token(CARAT);
        boost = jj_consume_token(NUMBER);
        switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
        case FUZZY_SLOP:
          fuzzySlop = jj_consume_token(FUZZY_SLOP);
          fuzzy = true;
          break;
        default:
          jj_la1[17] = jj_gen;
          ;
        }
        break;
      default:
        jj_la1[18] = jj_gen;
        ;
      }
       if (fuzzy) {
           float fms = defaultMinSimilarity;
           try {
            fms = Float.valueOf(fuzzySlop.image.substring(1)).floatValue();
           } catch (Exception ignored) { }
          if(fms < 0.0f){
            {if (true) throw new ParseException(new MessageImpl(QueryParserMessages.INVALID_SYNTAX_FUZZY_LIMITS));}
          } else if (fms >= 1.0f && fms != (int) fms) {
            {if (true) throw new ParseException(new MessageImpl(QueryParserMessages.INVALID_SYNTAX_FUZZY_EDITS));}
          }
          q = new FuzzyQueryNode(field, EscapeQuerySyntaxImpl.discardEscapeChar(term.image), fms, term.beginColumn, term.endColumn);
       } else if (regexp) {
         String re = term.image.substring(1, term.image.length()-1);
         q = new RegexpQueryNode(field, re, 0, re.length());
       }
      break;
    case RANGEIN_START:
    case RANGEEX_START:
      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
      case RANGEIN_START:
        jj_consume_token(RANGEIN_START);
        startInc = true;
        break;
      case RANGEEX_START:
        jj_consume_token(RANGEEX_START);
        break;
      default:
        jj_la1[19] = jj_gen;
        jj_consume_token(-1);
        throw new ParseException();
      }
      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
      case RANGE_GOOP:
        goop1 = jj_consume_token(RANGE_GOOP);
        break;
      case RANGE_QUOTED:
        goop1 = jj_consume_token(RANGE_QUOTED);
        break;
      default:
        jj_la1[20] = jj_gen;
        jj_consume_token(-1);
        throw new ParseException();
      }
      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
      case RANGE_TO:
        jj_consume_token(RANGE_TO);
        break;
      default:
        jj_la1[21] = jj_gen;
        ;
      }
      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
      case RANGE_GOOP:
        goop2 = jj_consume_token(RANGE_GOOP);
        break;
      case RANGE_QUOTED:
        goop2 = jj_consume_token(RANGE_QUOTED);
        break;
      default:
        jj_la1[22] = jj_gen;
        jj_consume_token(-1);
        throw new ParseException();
      }
      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
      case RANGEIN_END:
        jj_consume_token(RANGEIN_END);
        endInc = true;
        break;
      case RANGEEX_END:
        jj_consume_token(RANGEEX_END);
        break;
      default:
        jj_la1[23] = jj_gen;
        jj_consume_token(-1);
        throw new ParseException();
      }
      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
      case CARAT:
        jj_consume_token(CARAT);
        boost = jj_consume_token(NUMBER);
        break;
      default:
        jj_la1[24] = jj_gen;
        ;
      }
          if (goop1.kind == RANGE_QUOTED) {
            goop1.image = goop1.image.substring(1, goop1.image.length()-1);
          }
          if (goop2.kind == RANGE_QUOTED) {
            goop2.image = goop2.image.substring(1, goop2.image.length()-1);
          }

      qLower = new FieldQueryNode(field,
          EscapeQuerySyntaxImpl.discardEscapeChar(goop1.image), goop1.beginColumn, goop1.endColumn);
      qUpper = new FieldQueryNode(field,
          EscapeQuerySyntaxImpl.discardEscapeChar(goop2.image), goop2.beginColumn, goop2.endColumn);
      q = new TermRangeQueryNode(qLower, qUpper, startInc, endInc);
      break;
    case QUOTED:
      term = jj_consume_token(QUOTED);
      // ... (snippet truncated)

Examples of org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode
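A query-node processor (it appears to be the analyzer processor of the standard parser) that feeds the text of a FieldQueryNode through an Analyzer. Depending on the resulting token stream it returns the original node with the analyzed text, a NoTokenFoundQueryNode, a group of FieldQueryNodes, a MultiPhraseQueryNode, or a TokenizedPhraseQueryNode. The snippet starts in the middle of the guard condition that excludes wildcard, fuzzy, regexp, and range-bound nodes.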

        && !(node instanceof WildcardQueryNode)
        && !(node instanceof FuzzyQueryNode)
        && !(node instanceof RegexpQueryNode)
        && !(node.getParent() instanceof RangeQueryNode)) {

      FieldQueryNode fieldNode = ((FieldQueryNode) node);
      String text = fieldNode.getTextAsString();
      String field = fieldNode.getFieldAsString();

      TokenStream source;
      try {
        source = this.analyzer.tokenStream(field, text);
        source.reset();
      } catch (IOException e1) {
        throw new RuntimeException(e1);
      }
      CachingTokenFilter buffer = new CachingTokenFilter(source);

      PositionIncrementAttribute posIncrAtt = null;
      int numTokens = 0;
      int positionCount = 0;
      boolean severalTokensAtSamePosition = false;

      if (buffer.hasAttribute(PositionIncrementAttribute.class)) {
        posIncrAtt = buffer.getAttribute(PositionIncrementAttribute.class);
      }

      try {

        while (buffer.incrementToken()) {
          numTokens++;
          int positionIncrement = (posIncrAtt != null) ? posIncrAtt
              .getPositionIncrement() : 1;
          if (positionIncrement != 0) {
            positionCount += positionIncrement;

          } else {
            severalTokensAtSamePosition = true;
          }

        }

      } catch (IOException e) {
        // ignore
      }

      try {
        // rewind the buffer stream
        buffer.reset();

        // close original stream - all tokens buffered
        source.close();
      } catch (IOException e) {
        // ignore
      }

      if (!buffer.hasAttribute(CharTermAttribute.class)) {
        return new NoTokenFoundQueryNode();
      }

      CharTermAttribute termAtt = buffer.getAttribute(CharTermAttribute.class);

      if (numTokens == 0) {
        return new NoTokenFoundQueryNode();

      } else if (numTokens == 1) {
        String term = null;
        try {
          boolean hasNext;
          hasNext = buffer.incrementToken();
          assert hasNext == true;
          term = termAtt.toString();

        } catch (IOException e) {
          // safe to ignore, because we know the number of tokens
        }

        fieldNode.setText(term);

        return fieldNode;

      } else if (severalTokensAtSamePosition || !(node instanceof QuotedFieldQueryNode)) {
        if (positionCount == 1 || !(node instanceof QuotedFieldQueryNode)) {
          // no phrase query:
          LinkedList<QueryNode> children = new LinkedList<QueryNode>();

          for (int i = 0; i < numTokens; i++) {
            String term = null;
            try {
              boolean hasNext = buffer.incrementToken();
              assert hasNext == true;
              term = termAtt.toString();

            } catch (IOException e) {
              // safe to ignore, because we know the number of tokens
            }

            children.add(new FieldQueryNode(field, term, -1, -1));

          }
          return new GroupQueryNode(
            new StandardBooleanQueryNode(children, positionCount==1));
        } else {
          // phrase query:
          MultiPhraseQueryNode mpq = new MultiPhraseQueryNode();

          List<FieldQueryNode> multiTerms = new ArrayList<FieldQueryNode>();
          int position = -1;
          int i = 0;
          int termGroupCount = 0;
          for (; i < numTokens; i++) {
            String term = null;
            int positionIncrement = 1;
            try {
              boolean hasNext = buffer.incrementToken();
              assert hasNext == true;
              term = termAtt.toString();
              if (posIncrAtt != null) {
                positionIncrement = posIncrAtt.getPositionIncrement();
              }

            } catch (IOException e) {
              // safe to ignore, because we know the number of tokens
            }

            if (positionIncrement > 0 && multiTerms.size() > 0) {

              for (FieldQueryNode termNode : multiTerms) {

                if (this.positionIncrementsEnabled) {
                  termNode.setPositionIncrement(position);
                } else {
                  termNode.setPositionIncrement(termGroupCount);
                }

                mpq.add(termNode);

              }

              // Only increment once for each "group" of
              // terms that were in the same position:
              termGroupCount++;

              multiTerms.clear();

            }

            position += positionIncrement;
            multiTerms.add(new FieldQueryNode(field, term, -1, -1));

          }

          for (FieldQueryNode termNode : multiTerms) {

            if (this.positionIncrementsEnabled) {
              termNode.setPositionIncrement(position);

            } else {
              termNode.setPositionIncrement(termGroupCount);
            }

            mpq.add(termNode);

          }

          return mpq;

        }

      } else {

        TokenizedPhraseQueryNode pq = new TokenizedPhraseQueryNode();

        int position = -1;

        for (int i = 0; i < numTokens; i++) {
          String term = null;
          int positionIncrement = 1;

          try {
            boolean hasNext = buffer.incrementToken();
            assert hasNext == true;
            term = termAtt.toString();

            if (posIncrAtt != null) {
              positionIncrement = posIncrAtt.getPositionIncrement();
            }

          } catch (IOException e) {
            // safe to ignore, because we know the number of tokens
          }

          FieldQueryNode newFieldNode = new FieldQueryNode(field, term, -1, -1);

          if (this.positionIncrementsEnabled) {
            position += positionIncrement;
            newFieldNode.setPositionIncrement(position);

          } else {
            newFieldNode.setPositionIncrement(i);
          }

          pq.add(newFieldNode);

        }
        // ... (snippet truncated)

Examples of org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode
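A unit test that registers a custom builder for the field "field" on a QueryTreeBuilder and then builds a FieldQueryNode for that field, expecting the custom builder's result.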

 
  @Test
  public void testSetFieldBuilder() throws QueryNodeException {
    QueryTreeBuilder qtb = new QueryTreeBuilder();
    qtb.setBuilder("field", new DummyBuilder());
    Object result = qtb.build(new FieldQueryNode(new UnescapedCharSequence("field"), "foo", 0, 0));
    Assert.assertEquals("OK", result);
   
  }
  // ... (snippet truncated)
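
The DummyBuilder used above is defined elsewhere in the test class. A minimal sketch of what such a builder could look like, assuming the QueryBuilder interface of the flexible query parser; the class body below is a hypothetical stand-in, written so that it returns the "OK" marker the assertion expects:

  import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
  import org.apache.lucene.queryparser.flexible.core.builders.QueryBuilder;
  import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;

  // Hypothetical stand-in for the DummyBuilder referenced by the test:
  // it ignores the node it receives and returns the marker string
  // that the assertion checks for.
  class DummyBuilder implements QueryBuilder {

    @Override
    public Object build(QueryNode queryNode) throws QueryNodeException {
      return "OK";
    }
  }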

Examples of org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode
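An older-looking variant of the analyzer processor shown above; the only visible difference is that Analyzer.tokenStream() is given a StringReader instead of the raw String.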

        && !(node instanceof WildcardQueryNode)
        && !(node instanceof FuzzyQueryNode)
        && !(node instanceof RegexpQueryNode)
        && !(node.getParent() instanceof RangeQueryNode)) {

      FieldQueryNode fieldNode = ((FieldQueryNode) node);
      String text = fieldNode.getTextAsString();
      String field = fieldNode.getFieldAsString();

      TokenStream source;
      try {
        source = this.analyzer.tokenStream(field, new StringReader(text));
        source.reset();
      } catch (IOException e1) {
        throw new RuntimeException(e1);
      }
      CachingTokenFilter buffer = new CachingTokenFilter(source);

      PositionIncrementAttribute posIncrAtt = null;
      int numTokens = 0;
      int positionCount = 0;
      boolean severalTokensAtSamePosition = false;

      if (buffer.hasAttribute(PositionIncrementAttribute.class)) {
        posIncrAtt = buffer.getAttribute(PositionIncrementAttribute.class);
      }

      try {

        while (buffer.incrementToken()) {
          numTokens++;
          int positionIncrement = (posIncrAtt != null) ? posIncrAtt
              .getPositionIncrement() : 1;
          if (positionIncrement != 0) {
            positionCount += positionIncrement;

          } else {
            severalTokensAtSamePosition = true;
          }

        }

      } catch (IOException e) {
        // ignore
      }

      try {
        // rewind the buffer stream
        buffer.reset();

        // close original stream - all tokens buffered
        source.close();
      } catch (IOException e) {
        // ignore
      }

      if (!buffer.hasAttribute(CharTermAttribute.class)) {
        return new NoTokenFoundQueryNode();
      }

      CharTermAttribute termAtt = buffer.getAttribute(CharTermAttribute.class);

      if (numTokens == 0) {
        return new NoTokenFoundQueryNode();

      } else if (numTokens == 1) {
        String term = null;
        try {
          boolean hasNext;
          hasNext = buffer.incrementToken();
          assert hasNext == true;
          term = termAtt.toString();

        } catch (IOException e) {
          // safe to ignore, because we know the number of tokens
        }

        fieldNode.setText(term);

        return fieldNode;

      } else if (severalTokensAtSamePosition || !(node instanceof QuotedFieldQueryNode)) {
        if (positionCount == 1 || !(node instanceof QuotedFieldQueryNode)) {
          // no phrase query:
          LinkedList<QueryNode> children = new LinkedList<QueryNode>();

          for (int i = 0; i < numTokens; i++) {
            String term = null;
            try {
              boolean hasNext = buffer.incrementToken();
              assert hasNext == true;
              term = termAtt.toString();

            } catch (IOException e) {
              // safe to ignore, because we know the number of tokens
            }

            children.add(new FieldQueryNode(field, term, -1, -1));

          }
          return new GroupQueryNode(
            new StandardBooleanQueryNode(children, positionCount==1));
        } else {
          // phrase query:
          MultiPhraseQueryNode mpq = new MultiPhraseQueryNode();

          List<FieldQueryNode> multiTerms = new ArrayList<FieldQueryNode>();
          int position = -1;
          int i = 0;
          int termGroupCount = 0;
          for (; i < numTokens; i++) {
            String term = null;
            int positionIncrement = 1;
            try {
              boolean hasNext = buffer.incrementToken();
              assert hasNext == true;
              term = termAtt.toString();
              if (posIncrAtt != null) {
                positionIncrement = posIncrAtt.getPositionIncrement();
              }

            } catch (IOException e) {
              // safe to ignore, because we know the number of tokens
            }

            if (positionIncrement > 0 && multiTerms.size() > 0) {

              for (FieldQueryNode termNode : multiTerms) {

                if (this.positionIncrementsEnabled) {
                  termNode.setPositionIncrement(position);
                } else {
                  termNode.setPositionIncrement(termGroupCount);
                }

                mpq.add(termNode);

              }

              // Only increment once for each "group" of
              // terms that were in the same position:
              termGroupCount++;

              multiTerms.clear();

            }

            position += positionIncrement;
            multiTerms.add(new FieldQueryNode(field, term, -1, -1));

          }

          for (FieldQueryNode termNode : multiTerms) {

            if (this.positionIncrementsEnabled) {
              termNode.setPositionIncrement(position);

            } else {
              termNode.setPositionIncrement(termGroupCount);
            }

            mpq.add(termNode);

          }

          return mpq;

        }

      } else {

        TokenizedPhraseQueryNode pq = new TokenizedPhraseQueryNode();

        int position = -1;

        for (int i = 0; i < numTokens; i++) {
          String term = null;
          int positionIncrement = 1;

          try {
            boolean hasNext = buffer.incrementToken();
            assert hasNext == true;
            term = termAtt.toString();

            if (posIncrAtt != null) {
              positionIncrement = posIncrAtt.getPositionIncrement();
            }

          } catch (IOException e) {
            // safe to ignore, because we know the number of tokens
          }

          FieldQueryNode newFieldNode = new FieldQueryNode(field, term, -1, -1);

          if (this.positionIncrementsEnabled) {
            position += positionIncrement;
            newFieldNode.setPositionIncrement(position);

          } else {
            newFieldNode.setPositionIncrement(i);
          }

          pq.add(newFieldNode);

        }
        // ... (snippet truncated)

Examples of org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode
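A phrase-query builder fragment: each child FieldQueryNode contributes the Term of its prebuilt TermQuery, added to the PhraseQuery at the position given by the node's position increment.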

    if (children != null) {

      for (QueryNode child : children) {
        TermQuery termQuery = (TermQuery) child
            .getTag(QueryTreeBuilder.QUERY_TREE_BUILDER_TAGID);
        FieldQueryNode termNode = (FieldQueryNode) child;

        phraseQuery.add(termQuery.getTerm(), termNode.getPositionIncrement());

      }

    }
    // ... (snippet truncated)

Examples of org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode
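A TermRangeQuery builder: the lower and upper bounds of a TermRangeQueryNode are themselves FieldQueryNodes, and an empty lower-bound text is treated as an open bound.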

  }
 
  @Override
  public TermRangeQuery build(QueryNode queryNode) throws QueryNodeException {
    TermRangeQueryNode rangeNode = (TermRangeQueryNode) queryNode;
    FieldQueryNode upper = rangeNode.getUpperBound();
    FieldQueryNode lower = rangeNode.getLowerBound();
   
    String field = StringUtils.toString(rangeNode.getField());
    String lowerText = lower.getTextAsString();
    String upperText = upper.getTextAsString();
   
    if (lowerText.length() == 0) {
      lowerText = null;
    }
    // ... (snippet truncated)

Examples of org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode
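The same position-grouping fragment as in the first example, here written with the Java 7 diamond operator.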

    if (children != null) {
      TreeMap<Integer, List<Term>> positionTermMap = new TreeMap<>();

      for (QueryNode child : children) {
        FieldQueryNode termNode = (FieldQueryNode) child;
        TermQuery termQuery = (TermQuery) termNode
            .getTag(QueryTreeBuilder.QUERY_TREE_BUILDER_TAGID);
        List<Term> termList = positionTermMap.get(termNode
            .getPositionIncrement());

        if (termList == null) {
          termList = new LinkedList<>();
          positionTermMap.put(termNode.getPositionIncrement(), termList);

        }

        termList.add(termQuery.getTerm());
        // ... (snippet truncated)

Examples of org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode
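The simplest builder of all: a FieldQueryNode is turned into a TermQuery over a Term built from the node's field and text.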

    // empty constructor
  }

  @Override
  public TermQuery build(QueryNode queryNode) throws QueryNodeException {
    FieldQueryNode fieldNode = (FieldQueryNode) queryNode;

    return new TermQuery(new Term(fieldNode.getFieldAsString(), fieldNode
        .getTextAsString()));

  }
  // ... (snippet truncated)
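
Putting the pieces together, a minimal sketch of creating a FieldQueryNode by hand and building the same TermQuery the builder above would produce (the class name is made up for illustration; -1/-1 stand for unknown character offsets, as in the processor snippets):

  import org.apache.lucene.index.Term;
  import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode;
  import org.apache.lucene.search.TermQuery;

  public class FieldQueryNodeExample {

    public static void main(String[] args) {
      // A node representing the query "title:lucene"; -1/-1 means the
      // original character offsets in the query string are unknown.
      FieldQueryNode node = new FieldQueryNode("title", "lucene", -1, -1);

      // FieldQueryNodeBuilder-style construction of the final query:
      TermQuery query = new TermQuery(
          new Term(node.getFieldAsString(), node.getTextAsString()));

      System.out.println(query); // prints: title:lucene
    }
  }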