Package: com.esotericsoftware.yamlbeans.tokenizer

Usage examples of com.esotericsoftware.yamlbeans.tokenizer.ScalarToken


        return null;
      }
    };
    table[P_SCALAR] = new Production() {
      public Event produce () {
        // Consumes a scalar token and emits the corresponding ScalarEvent.
        ScalarToken token = (ScalarToken)tokenizer.getNextToken();
        boolean[] implicit = null;
        // implicit[0]: the scalar is plain and untagged (so the resolver may
        // infer its type); implicit[1]: non-plain but untagged.
        // NOTE(review): '&&' binds tighter than '||', so this condition reads
        // (plain && tag == null) || "!".equals(tag) — which matches the
        // reference PyYAML parser, but deserves explicit parentheses.
        if (token.getPlain() && tags.get(0) == null || "!".equals(tags.get(0)))
          implicit = new boolean[] {true, false};
        else if (tags.get(0) == null)
          implicit = new boolean[] {false, true};
        else
          implicit = new boolean[] {false, false};
        return new ScalarEvent(anchors.get(0), tags.get(0), implicit, token.getValue(), token.getStyle());
      }
    };
    table[P_BLOCK_SEQUENCE_ENTRY] = new Production() {
      public Event produce () {
        // Parses one "- item" entry of a block sequence. Productions are
        // pushed at index 0, so the LAST add runs FIRST.
        if (tokenizer.peekNextTokenType() == BLOCK_ENTRY) {
          tokenizer.getNextToken(); // Consume the '-' block entry token.
          TokenType type = tokenizer.peekNextTokenType();
          if (type == BLOCK_ENTRY || type == BLOCK_END) {
            // "- " immediately followed by another entry or the sequence end:
            // the item is an empty scalar.
            parseStack.add(0, table[P_BLOCK_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          } else {
            // Otherwise parse a full block node, then look for more entries.
            parseStack.add(0, table[P_BLOCK_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_BLOCK_NODE]);
          }
        }
        return null; // No event; this production only schedules more work.
      }
    };
    table[P_BLOCK_MAPPING_ENTRY] = new Production() {
      public Event produce () {
        // Parses one "key: value" entry of a block mapping. Productions are
        // pushed at index 0, so the LAST add runs FIRST.
        TokenType type = tokenizer.peekNextTokenType();
        if (type == KEY) {
          tokenizer.getNextToken(); // Consume the '?'/implicit key token.
          type = tokenizer.peekNextTokenType();
          if (type == KEY || type == VALUE || type == BLOCK_END) {
            // Key token with no key node after it: the key is an empty scalar.
            parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY]);
            parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY_VALUE]);
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          } else {
            // Parse node properties (anchor/tag) then the key node itself,
            // then the value, then look for further entries.
            parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY]);
            parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY_VALUE]);
            parseStack.add(0, table[P_BLOCK_NODE_OR_INDENTLESS_SEQUENCE]);
            parseStack.add(0, table[P_PROPERTIES]);
          }
        } else if (type == VALUE) {
          // ": value" with no key at all: the key is an empty scalar.
          parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY]);
          parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY_VALUE]);
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        }
        return null;
      }
    };
    table[P_BLOCK_MAPPING_ENTRY_VALUE] = new Production() {
      public Event produce () {
        // Parses the value half of a block mapping entry.
        TokenType type = tokenizer.peekNextTokenType();
        if (type == VALUE) {
          tokenizer.getNextToken(); // Consume the ':' value token.
          type = tokenizer.peekNextTokenType();
          if (type == KEY || type == VALUE || type == BLOCK_END)
            // ':' followed by the next entry or the mapping end: empty value.
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          else {
            // Parse properties (anchor/tag) then the value node.
            parseStack.add(0, table[P_BLOCK_NODE_OR_INDENTLESS_SEQUENCE]);
            parseStack.add(0, table[P_PROPERTIES]);
          }
        } else if (type == KEY) parseStack.add(0, table[P_EMPTY_SCALAR]); // Key with no ':' — value is empty.
        return null;
      }
    };
    table[P_BLOCK_NODE_OR_INDENTLESS_SEQUENCE] = new Production() {
      public Event produce () {
        TokenType type = tokenizer.peekNextTokenType();
        if (type == ALIAS)
          parseStack.add(0, table[P_ALIAS]);
        else if (type == BLOCK_ENTRY) {
          parseStack.add(0, table[P_INDENTLESS_BLOCK_SEQUENCE]);
        } else {
          parseStack.add(0, table[P_BLOCK_CONTENT]);
        }
        return null;
      }
    };
    table[P_BLOCK_SEQUENCE_START] = new Production() {
      public Event produce () {
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        tokenizer.getNextToken();
        return new SequenceStartEvent(anchors.get(0), tags.get(0), implicit, false);
      }
    };
    table[P_BLOCK_SEQUENCE_END] = new Production() {
      public Event produce () {
        if (tokenizer.peekNextTokenType() != BLOCK_END)
          throw new ParserException("Expected a 'block end' but found: " + tokenizer.peekNextTokenType());
        tokenizer.getNextToken();
        return Event.SEQUENCE_END;
      }
    };
    table[P_BLOCK_MAPPING_START] = new Production() {
      public Event produce () {
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        tokenizer.getNextToken();
        return new MappingStartEvent(anchors.get(0), tags.get(0), implicit, false);
      }
    };
    table[P_BLOCK_MAPPING_END] = new Production() {
      public Event produce () {
        if (tokenizer.peekNextTokenType() != BLOCK_END)
          throw new ParserException("Expected a 'block end' but found: " + tokenizer.peekNextTokenType());
        tokenizer.getNextToken();
        return Event.MAPPING_END;
      }
    };
    table[P_INDENTLESS_BLOCK_SEQUENCE] = new Production() {
      public Event produce () {
        // Schedules start/entries/end for a sequence whose "- " entries are
        // not indented past the parent. Pushed in reverse: start runs first.
        parseStack.add(0, table[P_BLOCK_INDENTLESS_SEQUENCE_END]);
        parseStack.add(0, table[P_INDENTLESS_BLOCK_SEQUENCE_ENTRY]);
        parseStack.add(0, table[P_BLOCK_INDENTLESS_SEQUENCE_START]);
        return null;
      }
    };
    table[P_BLOCK_INDENTLESS_SEQUENCE_START] = new Production() {
      public Event produce () {
        // Unlike P_BLOCK_SEQUENCE_START, no token is consumed: an indentless
        // sequence has no dedicated start token.
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        return new SequenceStartEvent(anchors.get(0), tags.get(0), implicit, false);
      }
    };
    table[P_INDENTLESS_BLOCK_SEQUENCE_ENTRY] = new Production() {
      public Event produce () {
        // One "- item" entry of an indentless sequence. Mirrors
        // P_BLOCK_SEQUENCE_ENTRY but also treats KEY/VALUE as terminators,
        // since the sequence may be embedded directly in a mapping.
        if (tokenizer.peekNextTokenType() == BLOCK_ENTRY) {
          tokenizer.getNextToken(); // Consume the '-' block entry token.
          TokenType type = tokenizer.peekNextTokenType();
          if (type == BLOCK_ENTRY || type == KEY || type == VALUE || type == BLOCK_END) {
            parseStack.add(0, table[P_INDENTLESS_BLOCK_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          } else {
            parseStack.add(0, table[P_INDENTLESS_BLOCK_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_BLOCK_NODE]);
          }
        }
        return null;
      }
    };
    table[P_BLOCK_INDENTLESS_SEQUENCE_END] = new Production() {
      public Event produce () {
        // No token to consume: the indentless sequence ends implicitly.
        return Event.SEQUENCE_END;
      }
    };
    table[P_FLOW_SEQUENCE_START] = new Production() {
      public Event produce () {
        // Emits the start of a flow sequence ("[ ... ]"). Implicit when there
        // is no tag or only the non-specific "!" tag.
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        tokenizer.getNextToken();
        return new SequenceStartEvent(anchors.get(0), tags.get(0), implicit, true);
      }
    };
    table[P_FLOW_SEQUENCE_ENTRY] = new Production() {
      public Event produce () {
        // One entry of a flow sequence. Pushed in reverse order (index 0 runs
        // first).
        if (tokenizer.peekNextTokenType() != FLOW_SEQUENCE_END) {
          if (tokenizer.peekNextTokenType() == KEY) {
            // "[ key: value, ... ]": a single-pair mapping inside the
            // sequence — emit mapping start/key/value/end around it.
            parseStack.add(0, table[P_FLOW_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_FLOW_ENTRY_MARKER]);
            parseStack.add(0, table[P_FLOW_INTERNAL_MAPPING_END]);
            parseStack.add(0, table[P_FLOW_INTERNAL_VALUE]);
            parseStack.add(0, table[P_FLOW_INTERNAL_CONTENT]);
            parseStack.add(0, table[P_FLOW_INTERNAL_MAPPING_START]);
          } else {
            // Plain entry: optional ',' marker, then a flow node, then look
            // for further entries.
            parseStack.add(0, table[P_FLOW_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_FLOW_NODE]);
            parseStack.add(0, table[P_FLOW_ENTRY_MARKER]);
          }
        }
        return null;
      }
    };
    table[P_FLOW_SEQUENCE_END] = new Production() {
      public Event produce () {
        tokenizer.getNextToken(); // Consume the ']' token.
        return Event.SEQUENCE_END;
      }
    };
    table[P_FLOW_MAPPING_START] = new Production() {
      public Event produce () {
        // Emits the start of a flow mapping ("{ ... }"). Implicit when there
        // is no tag or only the non-specific "!" tag.
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        tokenizer.getNextToken();
        return new MappingStartEvent(anchors.get(0), tags.get(0), implicit, true);
      }
    };
    table[P_FLOW_MAPPING_ENTRY] = new Production() {
      public Event produce () {
        // One entry of a flow mapping. Pushed in reverse order.
        if (tokenizer.peekNextTokenType() != FLOW_MAPPING_END) {
          if (tokenizer.peekNextTokenType() == KEY) {
            // "key: value" pair: content (key), then value, then ',' marker.
            parseStack.add(0, table[P_FLOW_MAPPING_ENTRY]);
            parseStack.add(0, table[P_FLOW_ENTRY_MARKER]);
            parseStack.add(0, table[P_FLOW_MAPPING_INTERNAL_VALUE]);
            parseStack.add(0, table[P_FLOW_MAPPING_INTERNAL_CONTENT]);
          } else {
            // Bare node used as an entry.
            parseStack.add(0, table[P_FLOW_MAPPING_ENTRY]);
            parseStack.add(0, table[P_FLOW_NODE]);
            parseStack.add(0, table[P_FLOW_ENTRY_MARKER]);
          }
        }
        return null;
      }
    };
    table[P_FLOW_MAPPING_END] = new Production() {
      public Event produce () {
        tokenizer.getNextToken(); // Consume the '}' token.
        return Event.MAPPING_END;
      }
    };
    table[P_FLOW_INTERNAL_MAPPING_START] = new Production() {
      public Event produce () {
        // Starts the implicit single-pair mapping created by "key: value"
        // inside a flow sequence. Consumes the KEY token; the mapping carries
        // no anchor or tag.
        tokenizer.getNextToken();
        return new MappingStartEvent(null, null, true, true);
      }
    };
    table[P_FLOW_INTERNAL_CONTENT] = new Production() {
      public Event produce () {
        // The key of the internal single-pair mapping: empty scalar if the
        // key is missing, otherwise a flow node.
        TokenType type = tokenizer.peekNextTokenType();
        if (type == VALUE || type == FLOW_ENTRY || type == FLOW_SEQUENCE_END)
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        else
          parseStack.add(0, table[P_FLOW_NODE]);
        return null;
      }
    };
    table[P_FLOW_INTERNAL_VALUE] = new Production() {
      public Event produce () {
        // The value of the internal single-pair mapping.
        if (tokenizer.peekNextTokenType() == VALUE) {
          tokenizer.getNextToken(); // Consume the ':' value token.
          if (tokenizer.peekNextTokenType() == FLOW_ENTRY || tokenizer.peekNextTokenType() == FLOW_SEQUENCE_END)
            parseStack.add(0, table[P_EMPTY_SCALAR]); // ':' with no value node.
          else
            parseStack.add(0, table[P_FLOW_NODE]);
        } else
          parseStack.add(0, table[P_EMPTY_SCALAR]); // No ':' at all.
        return null;
      }
    };
    table[P_FLOW_INTERNAL_MAPPING_END] = new Production() {
      public Event produce () {
        // No token to consume: the single-pair mapping ends implicitly.
        return Event.MAPPING_END;
      }
    };
    table[P_FLOW_ENTRY_MARKER] = new Production() {
      public Event produce () {
        // Consumes an optional ',' between flow collection entries.
        if (tokenizer.peekNextTokenType() == FLOW_ENTRY) tokenizer.getNextToken();
        return null;
      }
    };
    table[P_FLOW_NODE] = new Production() {
      public Event produce () {
        // A node in flow context: either an alias, or properties (anchor/tag)
        // followed by flow content. Pushed in reverse order.
        if (tokenizer.peekNextTokenType() == ALIAS)
          parseStack.add(0, table[P_ALIAS]);
        else {
          parseStack.add(0, table[P_PROPERTIES_END]);
          parseStack.add(0, table[P_FLOW_CONTENT]);
          parseStack.add(0, table[P_PROPERTIES]);
        }
        return null;
      }
    };
    table[P_FLOW_MAPPING_INTERNAL_CONTENT] = new Production() {
      public Event produce () {
        // The key of a flow mapping entry: empty scalar when the key is
        // missing; otherwise the KEY token is consumed and a flow node parsed.
        TokenType type = tokenizer.peekNextTokenType();
        if (type == VALUE || type == FLOW_ENTRY || type == FLOW_MAPPING_END)
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        else {
          tokenizer.getNextToken(); // Consume the KEY token.
          parseStack.add(0, table[P_FLOW_NODE]);
        }
        return null;
      }
    };
    table[P_FLOW_MAPPING_INTERNAL_VALUE] = new Production() {
      public Event produce () {
        // The value of a flow mapping entry; empty scalar when absent.
        if (tokenizer.peekNextTokenType() == VALUE) {
          tokenizer.getNextToken(); // Consume the ':' value token.
          if (tokenizer.peekNextTokenType() == FLOW_ENTRY || tokenizer.peekNextTokenType() == FLOW_MAPPING_END)
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          else
            parseStack.add(0, table[P_FLOW_NODE]);
        } else
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        return null;
      }
    };
    table[P_ALIAS] = new Production() {
      public Event produce () {
        // Consumes an alias token ("*name") and emits the alias event.
        AliasToken token = (AliasToken)tokenizer.getNextToken();
        return new AliasEvent(token.getInstanceName());
      }
    };
    table[P_EMPTY_SCALAR] = new Production() {
      public Event produce () {
        return new ScalarEvent(null, null, new boolean[] {true, false}, "", (char)0);
View Full Code Here


        return null;
      }
    };
    table[P_SCALAR] = new Production() {
      public Event produce () {
        // NOTE(review): this section duplicates the earlier production-table
        // code verbatim (scraper artifact).
        // Consumes a scalar token and emits the corresponding ScalarEvent.
        ScalarToken token = (ScalarToken)tokenizer.getNextToken();
        boolean[] implicit = null;
        // NOTE(review): '&&' binds tighter than '||' — this reads
        // (plain && tag == null) || "!".equals(tag).
        if (token.getPlain() && tags.get(0) == null || "!".equals(tags.get(0)))
          implicit = new boolean[] {true, false};
        else if (tags.get(0) == null)
          implicit = new boolean[] {false, true};
        else
          implicit = new boolean[] {false, false};
        return new ScalarEvent(anchors.get(0), tags.get(0), implicit, token.getValue(), token.getStyle());
      }
    };
    table[P_BLOCK_SEQUENCE_ENTRY] = new Production() {
      public Event produce () {
        // One "- item" entry of a block sequence; pushes run in reverse order.
        if (tokenizer.peekNextTokenType() == BLOCK_ENTRY) {
          tokenizer.getNextToken(); // Consume the '-' token.
          TokenType type = tokenizer.peekNextTokenType();
          if (type == BLOCK_ENTRY || type == BLOCK_END) {
            // Empty item.
            parseStack.add(0, table[P_BLOCK_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          } else {
            parseStack.add(0, table[P_BLOCK_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_BLOCK_NODE]);
          }
        }
        return null;
      }
    };
    table[P_BLOCK_MAPPING_ENTRY] = new Production() {
      public Event produce () {
        // One "key: value" entry of a block mapping.
        TokenType type = tokenizer.peekNextTokenType();
        if (type == KEY) {
          tokenizer.getNextToken(); // Consume the key token.
          type = tokenizer.peekNextTokenType();
          if (type == KEY || type == VALUE || type == BLOCK_END) {
            // Key token with no key node: empty-scalar key.
            parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY]);
            parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY_VALUE]);
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          } else {
            parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY]);
            parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY_VALUE]);
            parseStack.add(0, table[P_BLOCK_NODE_OR_INDENTLESS_SEQUENCE]);
            parseStack.add(0, table[P_PROPERTIES]);
          }
        } else if (type == VALUE) {
          // ": value" with no key: empty-scalar key.
          parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY]);
          parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY_VALUE]);
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        }
        return null;
      }
    };
    table[P_BLOCK_MAPPING_ENTRY_VALUE] = new Production() {
      public Event produce () {
        // The value half of a block mapping entry.
        TokenType type = tokenizer.peekNextTokenType();
        if (type == VALUE) {
          tokenizer.getNextToken(); // Consume the ':' token.
          type = tokenizer.peekNextTokenType();
          if (type == KEY || type == VALUE || type == BLOCK_END)
            parseStack.add(0, table[P_EMPTY_SCALAR]); // Empty value.
          else {
            parseStack.add(0, table[P_BLOCK_NODE_OR_INDENTLESS_SEQUENCE]);
            parseStack.add(0, table[P_PROPERTIES]);
          }
        } else if (type == KEY) parseStack.add(0, table[P_EMPTY_SCALAR]); // No ':' — empty value.
        return null;
      }
    };
    table[P_BLOCK_NODE_OR_INDENTLESS_SEQUENCE] = new Production() {
      public Event produce () {
        // Dispatch: alias node, indentless sequence, or block content.
        TokenType type = tokenizer.peekNextTokenType();
        if (type == ALIAS)
          parseStack.add(0, table[P_ALIAS]);
        else if (type == BLOCK_ENTRY) {
          parseStack.add(0, table[P_INDENTLESS_BLOCK_SEQUENCE]);
        } else {
          parseStack.add(0, table[P_BLOCK_CONTENT]);
        }
        return null;
      }
    };
    table[P_BLOCK_SEQUENCE_START] = new Production() {
      public Event produce () {
        // Implicit when untagged or tagged with the non-specific "!".
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        tokenizer.getNextToken(); // Consume the sequence start token.
        return new SequenceStartEvent(anchors.get(0), tags.get(0), implicit, false);
      }
    };
    table[P_BLOCK_SEQUENCE_END] = new Production() {
      public Event produce () {
        if (tokenizer.peekNextTokenType() != BLOCK_END)
          throw new ParserException("Expected a 'block end' but found: " + tokenizer.peekNextTokenType());
        tokenizer.getNextToken(); // Consume the block end token.
        return Event.SEQUENCE_END;
      }
    };
    table[P_BLOCK_MAPPING_START] = new Production() {
      public Event produce () {
        // Implicit when untagged or tagged with the non-specific "!".
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        tokenizer.getNextToken(); // Consume the mapping start token.
        return new MappingStartEvent(anchors.get(0), tags.get(0), implicit, false);
      }
    };
    table[P_BLOCK_MAPPING_END] = new Production() {
      public Event produce () {
        if (tokenizer.peekNextTokenType() != BLOCK_END)
          throw new ParserException("Expected a 'block end' but found: " + tokenizer.peekNextTokenType());
        tokenizer.getNextToken(); // Consume the block end token.
        return Event.MAPPING_END;
      }
    };
    table[P_INDENTLESS_BLOCK_SEQUENCE] = new Production() {
      public Event produce () {
        // Schedules start/entries/end for an indentless sequence (reverse
        // order: start runs first).
        parseStack.add(0, table[P_BLOCK_INDENTLESS_SEQUENCE_END]);
        parseStack.add(0, table[P_INDENTLESS_BLOCK_SEQUENCE_ENTRY]);
        parseStack.add(0, table[P_BLOCK_INDENTLESS_SEQUENCE_START]);
        return null;
      }
    };
    table[P_BLOCK_INDENTLESS_SEQUENCE_START] = new Production() {
      public Event produce () {
        // No token consumed: an indentless sequence has no start token.
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        return new SequenceStartEvent(anchors.get(0), tags.get(0), implicit, false);
      }
    };
    table[P_INDENTLESS_BLOCK_SEQUENCE_ENTRY] = new Production() {
      public Event produce () {
        // One "- item" entry; KEY/VALUE also terminate because the sequence
        // may sit directly inside a mapping.
        if (tokenizer.peekNextTokenType() == BLOCK_ENTRY) {
          tokenizer.getNextToken(); // Consume the '-' token.
          TokenType type = tokenizer.peekNextTokenType();
          if (type == BLOCK_ENTRY || type == KEY || type == VALUE || type == BLOCK_END) {
            parseStack.add(0, table[P_INDENTLESS_BLOCK_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          } else {
            parseStack.add(0, table[P_INDENTLESS_BLOCK_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_BLOCK_NODE]);
          }
        }
        return null;
      }
    };
    table[P_BLOCK_INDENTLESS_SEQUENCE_END] = new Production() {
      public Event produce () {
        // Ends implicitly; no token consumed.
        return Event.SEQUENCE_END;
      }
    };
    table[P_FLOW_SEQUENCE_START] = new Production() {
      public Event produce () {
        // Start of a flow sequence ("[ ... ]").
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        tokenizer.getNextToken(); // Consume the '[' token.
        return new SequenceStartEvent(anchors.get(0), tags.get(0), implicit, true);
      }
    };
    table[P_FLOW_SEQUENCE_ENTRY] = new Production() {
      public Event produce () {
        // One flow sequence entry; pushes run in reverse order.
        if (tokenizer.peekNextTokenType() != FLOW_SEQUENCE_END) {
          if (tokenizer.peekNextTokenType() == KEY) {
            // "[ key: value ]": single-pair mapping inside the sequence.
            parseStack.add(0, table[P_FLOW_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_FLOW_ENTRY_MARKER]);
            parseStack.add(0, table[P_FLOW_INTERNAL_MAPPING_END]);
            parseStack.add(0, table[P_FLOW_INTERNAL_VALUE]);
            parseStack.add(0, table[P_FLOW_INTERNAL_CONTENT]);
            parseStack.add(0, table[P_FLOW_INTERNAL_MAPPING_START]);
          } else {
            parseStack.add(0, table[P_FLOW_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_FLOW_NODE]);
            parseStack.add(0, table[P_FLOW_ENTRY_MARKER]);
          }
        }
        return null;
      }
    };
    table[P_FLOW_SEQUENCE_END] = new Production() {
      public Event produce () {
        tokenizer.getNextToken(); // Consume the ']' token.
        return Event.SEQUENCE_END;
      }
    };
    table[P_FLOW_MAPPING_START] = new Production() {
      public Event produce () {
        // Start of a flow mapping ("{ ... }").
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        tokenizer.getNextToken(); // Consume the '{' token.
        return new MappingStartEvent(anchors.get(0), tags.get(0), implicit, true);
      }
    };
    table[P_FLOW_MAPPING_ENTRY] = new Production() {
      public Event produce () {
        // One flow mapping entry; pushes run in reverse order.
        if (tokenizer.peekNextTokenType() != FLOW_MAPPING_END) {
          if (tokenizer.peekNextTokenType() == KEY) {
            parseStack.add(0, table[P_FLOW_MAPPING_ENTRY]);
            parseStack.add(0, table[P_FLOW_ENTRY_MARKER]);
            parseStack.add(0, table[P_FLOW_MAPPING_INTERNAL_VALUE]);
            parseStack.add(0, table[P_FLOW_MAPPING_INTERNAL_CONTENT]);
          } else {
            parseStack.add(0, table[P_FLOW_MAPPING_ENTRY]);
            parseStack.add(0, table[P_FLOW_NODE]);
            parseStack.add(0, table[P_FLOW_ENTRY_MARKER]);
          }
        }
        return null;
      }
    };
    table[P_FLOW_MAPPING_END] = new Production() {
      public Event produce () {
        tokenizer.getNextToken(); // Consume the '}' token.
        return Event.MAPPING_END;
      }
    };
    table[P_FLOW_INTERNAL_MAPPING_START] = new Production() {
      public Event produce () {
        // Starts the implicit single-pair mapping for "key: value" inside a
        // flow sequence; consumes the KEY token. No anchor or tag.
        tokenizer.getNextToken();
        return new MappingStartEvent(null, null, true, true);
      }
    };
    table[P_FLOW_INTERNAL_CONTENT] = new Production() {
      public Event produce () {
        // Key of the internal pair: empty scalar when missing.
        TokenType type = tokenizer.peekNextTokenType();
        if (type == VALUE || type == FLOW_ENTRY || type == FLOW_SEQUENCE_END)
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        else
          parseStack.add(0, table[P_FLOW_NODE]);
        return null;
      }
    };
    table[P_FLOW_INTERNAL_VALUE] = new Production() {
      public Event produce () {
        // Value of the internal pair: empty scalar when missing.
        if (tokenizer.peekNextTokenType() == VALUE) {
          tokenizer.getNextToken(); // Consume the ':' token.
          if (tokenizer.peekNextTokenType() == FLOW_ENTRY || tokenizer.peekNextTokenType() == FLOW_SEQUENCE_END)
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          else
            parseStack.add(0, table[P_FLOW_NODE]);
        } else
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        return null;
      }
    };
    table[P_FLOW_INTERNAL_MAPPING_END] = new Production() {
      public Event produce () {
        // Ends implicitly; no token consumed.
        return Event.MAPPING_END;
      }
    };
    table[P_FLOW_ENTRY_MARKER] = new Production() {
      public Event produce () {
        // Consumes an optional ',' between flow entries.
        if (tokenizer.peekNextTokenType() == FLOW_ENTRY) tokenizer.getNextToken();
        return null;
      }
    };
    table[P_FLOW_NODE] = new Production() {
      public Event produce () {
        // A flow-context node: alias, or properties followed by flow content.
        if (tokenizer.peekNextTokenType() == ALIAS)
          parseStack.add(0, table[P_ALIAS]);
        else {
          parseStack.add(0, table[P_PROPERTIES_END]);
          parseStack.add(0, table[P_FLOW_CONTENT]);
          parseStack.add(0, table[P_PROPERTIES]);
        }
        return null;
      }
    };
    table[P_FLOW_MAPPING_INTERNAL_CONTENT] = new Production() {
      public Event produce () {
        // Key of a flow mapping entry: empty scalar when missing; otherwise
        // consume the KEY token and parse a flow node.
        TokenType type = tokenizer.peekNextTokenType();
        if (type == VALUE || type == FLOW_ENTRY || type == FLOW_MAPPING_END)
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        else {
          tokenizer.getNextToken(); // Consume the KEY token.
          parseStack.add(0, table[P_FLOW_NODE]);
        }
        return null;
      }
    };
    table[P_FLOW_MAPPING_INTERNAL_VALUE] = new Production() {
      public Event produce () {
        // Value of a flow mapping entry: empty scalar when missing.
        if (tokenizer.peekNextTokenType() == VALUE) {
          tokenizer.getNextToken(); // Consume the ':' token.
          if (tokenizer.peekNextTokenType() == FLOW_ENTRY || tokenizer.peekNextTokenType() == FLOW_MAPPING_END)
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          else
            parseStack.add(0, table[P_FLOW_NODE]);
        } else
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        return null;
      }
    };
    table[P_ALIAS] = new Production() {
      public Event produce () {
        // Consumes an alias token ("*name") and emits the alias event.
        AliasToken token = (AliasToken)tokenizer.getNextToken();
        return new AliasEvent(token.getInstanceName());
      }
    };
    table[P_EMPTY_SCALAR] = new Production() {
      public Event produce () {
        return new ScalarEvent(null, null, new boolean[] {true, false}, "", (char)0);
View Full Code Here

TOP

Related Classes of com.esotericsoftware.yamlbeans.tokenizer.ScalarToken

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.