Examples of TokenStream


Examples of antlr.TokenStream

    /** This is a bit of plumbing which resumes collection of string constructor bodies,
     *  after an embedded expression has been parsed.
     *  Usage:  new GroovyRecognizer(new GroovyLexer(in).plumb()).
     */
    public TokenStream plumb() {
        return new TokenStream() {
            public Token nextToken() throws TokenStreamException {
                if (stringCtorState >= SCS_LIT) {
                    // This goo is modeled upon the ANTLR code for nextToken:
                    int quoteType = (stringCtorState & SCS_TYPE);
                    stringCtorState = 0// get out of this mode, now
View Full Code Here

Examples of com.dotcms.repackage.org.antlr.runtime.TokenStream

      Logger.error(SQLQueryFactory.class,e.getMessage(),e);
      throw new DotRuntimeException(e.getMessage(), e);
    }
        TokenSource lexer = new CmisSqlLexer(input);
       
        TokenStream tokens = new CommonTokenStream(lexer);
        CommonTree tree;
    try {
      tree = (CommonTree) new CmisSqlParser(tokens).query().getTree();
    } catch (RecognitionException e) {
      Logger.error(SQLQueryFactory.class,e.getMessage(),e);
View Full Code Here

Examples of com.floreysoft.jmte.token.TokenStream

  public InterpretedTemplate(String template, String sourceName, Engine engine) {
    this.template = template;
    this.engine = engine;
    this.sourceName = sourceName;
    tokenStream = new TokenStream(sourceName, template, engine
        .getExprStartToken(), engine.getExprEndToken());
    tokenStream.prefill();
  }
View Full Code Here

Examples of com.google.gwt.dev.js.rhino.TokenStream

      }
    });
    try {
      // Parse using the Rhino parser.
      //
      TokenStream ts = new TokenStream(r, rootSourceInfo.getFileName(),
          rootSourceInfo.getStartLine());
      Parser parser = new Parser(new IRFactory(ts));
      Node topNode = (Node) parser.parse(ts);

      // Map the Rhino AST to ours.
View Full Code Here

Examples of edu.buffalo.cse.ir.wikiindexer.tokenizer.TokenStream

    try {
      while (remaining > 0) {
        idoc = pool.take().get();
        if (idoc != null) {
          currDocId = docDict.lookup(idoc.getDocumentIdentifier());
          TokenStream stream;
          try {
            for (INDEXFIELD fld : INDEXFIELD.values()) {
              stream = idoc.getStream(fld);

              if (stream != null) {
                tokenmap = stream.getTokenMap();

                if (tokenmap != null) {
                  switch (fld) {
                  case TERM:
                    termRunner.addToIndex(tokenmap,
View Full Code Here

Examples of org.allspice.parser.parsetable.TokenStream

public class TestJavaTokenizer extends TestCase {
  public TokenStream makeTokenizer(String s) {
    return new JavaTokenizer(new StringReader(s)) ;
  }
  public void testfoo() throws SyntaxError {
    TokenStream tok = makeTokenizer("+") ;
    {
      Token t = tok.next() ;
      assertEquals(t.name,"+") ;
    }
  }
View Full Code Here

Examples of org.antlr.runtime.TokenStream

    public void testRuleParseLhs() throws Exception {
        final String text = "Person(age < 42, location==\"atlanta\") \nor\nPerson(name==\"bob\") \n";
        final AndDescr descrs = new AndDescr();
        final CharStream charStream = new ANTLRStringStream( text );
        final DRLLexer lexer = new DRLLexer( charStream );
        final TokenStream tokenStream = new CommonTokenStream( lexer );
        final DRLParser parser = new DRLParser( tokenStream );
        parser.setLineOffset( descrs.getLine() );
        parser.normal_lhs_block( descrs );
        if ( parser.hasErrors() ) {
            System.err.println( parser.getErrorMessages() );
View Full Code Here

Examples of org.antlr.runtime3_3_0.TokenStream

  public void enterDecision(int decisionNumber, boolean couldBacktrack) {
    lastRealTokenTouchedInDecision = null;
    stats.numDecisionEvents++;
    int startingLookaheadIndex = parser.getTokenStream().index();
    TokenStream input = parser.getTokenStream();
    if ( dump ) System.out.println("enterDecision canBacktrack="+couldBacktrack+" "+ decisionNumber +
               " backtrack depth " + backtrackDepth +
               " @ " + input.get(input.index()) +
               " rule " +locationDescription());
    String g = (String) currentGrammarFileName.peek();
    DecisionDescriptor descriptor = decisions.get(g, decisionNumber);
    if ( descriptor == null ) {
      descriptor = new DecisionDescriptor();
View Full Code Here

Examples of org.antlr.v4.runtime.TokenStream

    r = g.getRule("block");
    if ( r!=null) System.out.println(dot.getDOT(atn.ruleToStartState[r.index]));

    // Check ATN prediction
//    ParserATNSimulator interp = new ParserATNSimulator(atn);
    TokenStream input = new IntTokenStream(types);
    ParserInterpreterForTesting interp = new ParserInterpreterForTesting(g, input);
    int alt = interp.adaptivePredict(input, decision, ParserRuleContext.EMPTY);

    assertEquals(expectedAlt, alt);

    // Check adaptive prediction
    input.seek(0);
    alt = interp.adaptivePredict(input, decision, null);
    assertEquals(expectedAlt, alt);
    // run 2x; first time creates DFA in atn
    input.seek(0);
    alt = interp.adaptivePredict(input, decision, null);
    assertEquals(expectedAlt, alt);
  }
View Full Code Here

Examples of org.apache.lucene.analysis.TokenStream

      if (mHighlighter != null)
      {
        try
        {
          TokenStream tokenStream = new StandardAnalyzer().tokenStream("message", new StringReader(encoded_message));
          String highlighted = mHighlighter.getBestFragments(tokenStream, encoded_message, 25, "...");

          if (!highlighted.equals(""))
          {
            encoded_message = highlighted;
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware@gmail.com.