Examples of ScalarToken usage (code samples aggregated from multiple open-source projects)


Examples of com.esotericsoftware.yamlbeans.tokenizer.ScalarToken

        return null;
      }
    };
    table[P_SCALAR] = new Production() {
      public Event produce () {
        ScalarToken token = (ScalarToken)tokenizer.getNextToken();
        boolean[] implicit = null;
        if (token.getPlain() && tags.get(0) == null || "!".equals(tags.get(0)))
          implicit = new boolean[] {true, false};
        else if (tags.get(0) == null)
          implicit = new boolean[] {false, true};
        else
          implicit = new boolean[] {false, false};
        return new ScalarEvent(anchors.get(0), tags.get(0), implicit, token.getValue(), token.getStyle());
      }
    };
    table[P_BLOCK_SEQUENCE_ENTRY] = new Production() {
      // Handles one "- " entry of a block sequence. Emits nothing itself; it schedules
      // follow-up productions. add(0, ...) prepends, so the production added last is
      // presumably executed first — TODO confirm against the parse loop.
      public Event produce () {
        if (tokenizer.peekNextTokenType() == BLOCK_ENTRY) {
          tokenizer.getNextToken();
          TokenType type = tokenizer.peekNextTokenType();
          if (type == BLOCK_ENTRY || type == BLOCK_END) {
            // "- " immediately followed by another entry or block end: the entry is empty.
            parseStack.add(0, table[P_BLOCK_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          } else {
            // Otherwise parse a full block node, then look for the next entry.
            parseStack.add(0, table[P_BLOCK_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_BLOCK_NODE]);
          }
        }
        return null;
      }
    };
    table[P_BLOCK_MAPPING_ENTRY] = new Production() {
      // Handles one key of a block mapping; schedules the key node, its value, and
      // the next entry.
      public Event produce () {
        TokenType type = tokenizer.peekNextTokenType();
        if (type == KEY) {
          tokenizer.getNextToken();
          type = tokenizer.peekNextTokenType();
          if (type == KEY || type == VALUE || type == BLOCK_END) {
            // A key token with no content following it: the key is an empty scalar.
            parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY]);
            parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY_VALUE]);
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          } else {
            parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY]);
            parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY_VALUE]);
            parseStack.add(0, table[P_BLOCK_NODE_OR_INDENTLESS_SEQUENCE]);
            parseStack.add(0, table[P_PROPERTIES]);
          }
        } else if (type == VALUE) {
          // A value with no preceding key: treat the key as an empty scalar.
          parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY]);
          parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY_VALUE]);
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        }
        return null;
      }
    };
    table[P_BLOCK_MAPPING_ENTRY_VALUE] = new Production() {
      // Parses the value half of a block mapping entry; substitutes an empty scalar
      // when the value is omitted.
      public Event produce () {
        TokenType type = tokenizer.peekNextTokenType();
        if (type == VALUE) {
          tokenizer.getNextToken();
          type = tokenizer.peekNextTokenType();
          if (type == KEY || type == VALUE || type == BLOCK_END)
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          else {
            parseStack.add(0, table[P_BLOCK_NODE_OR_INDENTLESS_SEQUENCE]);
            parseStack.add(0, table[P_PROPERTIES]);
          }
        } else if (type == KEY) parseStack.add(0, table[P_EMPTY_SCALAR]);
        return null;
      }
    };
    table[P_BLOCK_NODE_OR_INDENTLESS_SEQUENCE] = new Production() {
      // A mapping value may be an alias, an indentless sequence ("- " entries at the
      // key's own indentation), or ordinary block content.
      public Event produce () {
        TokenType type = tokenizer.peekNextTokenType();
        if (type == ALIAS)
          parseStack.add(0, table[P_ALIAS]);
        else if (type == BLOCK_ENTRY) {
          parseStack.add(0, table[P_INDENTLESS_BLOCK_SEQUENCE]);
        } else {
          parseStack.add(0, table[P_BLOCK_CONTENT]);
        }
        return null;
      }
    };
    table[P_BLOCK_SEQUENCE_START] = new Production() {
      // Consumes the block sequence start token; the event is "implicit" when there is
      // no tag or only the non-specific "!" tag.
      public Event produce () {
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        tokenizer.getNextToken();
        return new SequenceStartEvent(anchors.get(0), tags.get(0), implicit, false);
      }
    };
    table[P_BLOCK_SEQUENCE_END] = new Production() {
      // Requires and consumes the BLOCK_END token, then emits SEQUENCE_END.
      public Event produce () {
        if (tokenizer.peekNextTokenType() != BLOCK_END)
          throw new ParserException("Expected a 'block end' but found: " + tokenizer.peekNextTokenType());
        tokenizer.getNextToken();
        return Event.SEQUENCE_END;
      }
    };
    table[P_BLOCK_MAPPING_START] = new Production() {
      // Consumes the block mapping start token and emits the mapping start event.
      public Event produce () {
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        tokenizer.getNextToken();
        return new MappingStartEvent(anchors.get(0), tags.get(0), implicit, false);
      }
    };
    table[P_BLOCK_MAPPING_END] = new Production() {
      // Requires and consumes the BLOCK_END token, then emits MAPPING_END.
      public Event produce () {
        if (tokenizer.peekNextTokenType() != BLOCK_END)
          throw new ParserException("Expected a 'block end' but found: " + tokenizer.peekNextTokenType());
        tokenizer.getNextToken();
        return Event.MAPPING_END;
      }
    };
    table[P_INDENTLESS_BLOCK_SEQUENCE] = new Production() {
      // Schedules start/entry/end for a sequence that has no extra indentation.
      // add(0, ...) prepends, so the production added last presumably runs first.
      public Event produce () {
        parseStack.add(0, table[P_BLOCK_INDENTLESS_SEQUENCE_END]);
        parseStack.add(0, table[P_INDENTLESS_BLOCK_SEQUENCE_ENTRY]);
        parseStack.add(0, table[P_BLOCK_INDENTLESS_SEQUENCE_START]);
        return null;
      }
    };
    table[P_BLOCK_INDENTLESS_SEQUENCE_START] = new Production() {
      // Like P_BLOCK_SEQUENCE_START but consumes no token: an indentless sequence has
      // no explicit start marker.
      public Event produce () {
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        return new SequenceStartEvent(anchors.get(0), tags.get(0), implicit, false);
      }
    };
    table[P_INDENTLESS_BLOCK_SEQUENCE_ENTRY] = new Production() {
      // One "- " entry. Unlike P_BLOCK_SEQUENCE_ENTRY, KEY/VALUE also end an entry here,
      // since the sequence shares its indentation with the enclosing mapping.
      public Event produce () {
        if (tokenizer.peekNextTokenType() == BLOCK_ENTRY) {
          tokenizer.getNextToken();
          TokenType type = tokenizer.peekNextTokenType();
          if (type == BLOCK_ENTRY || type == KEY || type == VALUE || type == BLOCK_END) {
            parseStack.add(0, table[P_INDENTLESS_BLOCK_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          } else {
            parseStack.add(0, table[P_INDENTLESS_BLOCK_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_BLOCK_NODE]);
          }
        }
        return null;
      }
    };
    table[P_BLOCK_INDENTLESS_SEQUENCE_END] = new Production() {
      // No BLOCK_END token exists for an indentless sequence; just emit the end event.
      public Event produce () {
        return Event.SEQUENCE_END;
      }
    };
    table[P_FLOW_SEQUENCE_START] = new Production() {
      // Consumes the flow sequence start token ('[') and emits the start event.
      public Event produce () {
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        tokenizer.getNextToken();
        return new SequenceStartEvent(anchors.get(0), tags.get(0), implicit, true);
      }
    };
    table[P_FLOW_SEQUENCE_ENTRY] = new Production() {
      // One entry of a flow sequence. A KEY token presumably marks a single-pair
      // mapping inside the sequence (e.g. [a: b]); otherwise a plain flow node.
      public Event produce () {
        if (tokenizer.peekNextTokenType() != FLOW_SEQUENCE_END) {
          if (tokenizer.peekNextTokenType() == KEY) {
            parseStack.add(0, table[P_FLOW_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_FLOW_ENTRY_MARKER]);
            parseStack.add(0, table[P_FLOW_INTERNAL_MAPPING_END]);
            parseStack.add(0, table[P_FLOW_INTERNAL_VALUE]);
            parseStack.add(0, table[P_FLOW_INTERNAL_CONTENT]);
            parseStack.add(0, table[P_FLOW_INTERNAL_MAPPING_START]);
          } else {
            parseStack.add(0, table[P_FLOW_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_FLOW_NODE]);
            parseStack.add(0, table[P_FLOW_ENTRY_MARKER]);
          }
        }
        return null;
      }
    };
    table[P_FLOW_SEQUENCE_END] = new Production() {
      // Consumes the closing token and emits SEQUENCE_END.
      public Event produce () {
        tokenizer.getNextToken();
        return Event.SEQUENCE_END;
      }
    };
    table[P_FLOW_MAPPING_START] = new Production() {
      // Consumes the flow mapping start token ('{') and emits the start event.
      public Event produce () {
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        tokenizer.getNextToken();
        return new MappingStartEvent(anchors.get(0), tags.get(0), implicit, true);
      }
    };
    table[P_FLOW_MAPPING_ENTRY] = new Production() {
      // One key/value pair of a flow mapping; without a KEY token the entry is parsed
      // as a bare flow node.
      public Event produce () {
        if (tokenizer.peekNextTokenType() != FLOW_MAPPING_END) {
          if (tokenizer.peekNextTokenType() == KEY) {
            parseStack.add(0, table[P_FLOW_MAPPING_ENTRY]);
            parseStack.add(0, table[P_FLOW_ENTRY_MARKER]);
            parseStack.add(0, table[P_FLOW_MAPPING_INTERNAL_VALUE]);
            parseStack.add(0, table[P_FLOW_MAPPING_INTERNAL_CONTENT]);
          } else {
            parseStack.add(0, table[P_FLOW_MAPPING_ENTRY]);
            parseStack.add(0, table[P_FLOW_NODE]);
            parseStack.add(0, table[P_FLOW_ENTRY_MARKER]);
          }
        }
        return null;
      }
    };
    table[P_FLOW_MAPPING_END] = new Production() {
      // Consumes the closing token and emits MAPPING_END.
      public Event produce () {
        tokenizer.getNextToken();
        return Event.MAPPING_END;
      }
    };
    table[P_FLOW_INTERNAL_MAPPING_START] = new Production() {
      // Starts the implicit single-pair mapping created for "key: value" inside a flow
      // sequence; it carries no anchor or tag of its own.
      public Event produce () {
        tokenizer.getNextToken();
        return new MappingStartEvent(null, null, true, true);
      }
    };
    table[P_FLOW_INTERNAL_CONTENT] = new Production() {
      // Key content of the internal single-pair mapping; an empty scalar when the key
      // is omitted. Note: no token is consumed here (contrast with
      // P_FLOW_MAPPING_INTERNAL_CONTENT below).
      public Event produce () {
        TokenType type = tokenizer.peekNextTokenType();
        if (type == VALUE || type == FLOW_ENTRY || type == FLOW_SEQUENCE_END)
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        else
          parseStack.add(0, table[P_FLOW_NODE]);
        return null;
      }
    };
    table[P_FLOW_INTERNAL_VALUE] = new Production() {
      // Value of the internal single-pair mapping; an empty scalar when ':' or the
      // value itself is omitted.
      public Event produce () {
        if (tokenizer.peekNextTokenType() == VALUE) {
          tokenizer.getNextToken();
          if (tokenizer.peekNextTokenType() == FLOW_ENTRY || tokenizer.peekNextTokenType() == FLOW_SEQUENCE_END)
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          else
            parseStack.add(0, table[P_FLOW_NODE]);
        } else
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        return null;
      }
    };
    table[P_FLOW_INTERNAL_MAPPING_END] = new Production() {
      // The implicit single-pair mapping has no closing token; just emit MAPPING_END.
      public Event produce () {
        return Event.MAPPING_END;
      }
    };
    table[P_FLOW_ENTRY_MARKER] = new Production() {
      // Optionally consumes a FLOW_ENTRY (',') separating flow entries.
      public Event produce () {
        if (tokenizer.peekNextTokenType() == FLOW_ENTRY) tokenizer.getNextToken();
        return null;
      }
    };
    table[P_FLOW_NODE] = new Production() {
      // A node in flow context: either an alias, or properties followed by content.
      public Event produce () {
        if (tokenizer.peekNextTokenType() == ALIAS)
          parseStack.add(0, table[P_ALIAS]);
        else {
          parseStack.add(0, table[P_PROPERTIES_END]);
          parseStack.add(0, table[P_FLOW_CONTENT]);
          parseStack.add(0, table[P_PROPERTIES]);
        }
        return null;
      }
    };
    table[P_FLOW_MAPPING_INTERNAL_CONTENT] = new Production() {
      // Key of a flow mapping pair. In the else branch the pending token (presumably
      // the KEY token — confirm against the tokenizer) is consumed before the key node
      // is parsed; P_FLOW_INTERNAL_CONTENT above consumes nothing.
      public Event produce () {
        TokenType type = tokenizer.peekNextTokenType();
        if (type == VALUE || type == FLOW_ENTRY || type == FLOW_MAPPING_END)
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        else {
          tokenizer.getNextToken();
          parseStack.add(0, table[P_FLOW_NODE]);
        }
        return null;
      }
    };
    table[P_FLOW_MAPPING_INTERNAL_VALUE] = new Production() {
      // Value of a flow mapping pair; an empty scalar when ':' or the value is missing.
      public Event produce () {
        if (tokenizer.peekNextTokenType() == VALUE) {
          tokenizer.getNextToken();
          if (tokenizer.peekNextTokenType() == FLOW_ENTRY || tokenizer.peekNextTokenType() == FLOW_MAPPING_END)
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          else
            parseStack.add(0, table[P_FLOW_NODE]);
        } else
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        return null;
      }
    };
    table[P_ALIAS] = new Production() {
      // Consumes an alias token and emits the corresponding AliasEvent.
      public Event produce () {
        AliasToken token = (AliasToken)tokenizer.getNextToken();
        return new AliasEvent(token.getInstanceName());
      }
    };
    table[P_EMPTY_SCALAR] = new Production() {
      public Event produce () {
        return new ScalarEvent(null, null, new boolean[] {true, false}, "", (char)0);
View Full Code Here

Examples of com.esotericsoftware.yamlbeans.tokenizer.ScalarToken

        return null;
      }
    };
    table[P_SCALAR] = new Production() {
      public Event produce () {
        ScalarToken token = (ScalarToken)tokenizer.getNextToken();
        boolean[] implicit = null;
        if (token.getPlain() && tags.get(0) == null || "!".equals(tags.get(0)))
          implicit = new boolean[] {true, false};
        else if (tags.get(0) == null)
          implicit = new boolean[] {false, true};
        else
          implicit = new boolean[] {false, false};
        return new ScalarEvent(anchors.get(0), tags.get(0), implicit, token.getValue(), token.getStyle());
      }
    };
    table[P_BLOCK_SEQUENCE_ENTRY] = new Production() {
      // One "- " entry of a block sequence; an empty scalar when the entry has no content.
      public Event produce () {
        if (tokenizer.peekNextTokenType() == BLOCK_ENTRY) {
          tokenizer.getNextToken();
          TokenType type = tokenizer.peekNextTokenType();
          if (type == BLOCK_ENTRY || type == BLOCK_END) {
            parseStack.add(0, table[P_BLOCK_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          } else {
            parseStack.add(0, table[P_BLOCK_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_BLOCK_NODE]);
          }
        }
        return null;
      }
    };
    table[P_BLOCK_MAPPING_ENTRY] = new Production() {
      // One key of a block mapping; empty-key and missing-key cases get empty scalars.
      public Event produce () {
        TokenType type = tokenizer.peekNextTokenType();
        if (type == KEY) {
          tokenizer.getNextToken();
          type = tokenizer.peekNextTokenType();
          if (type == KEY || type == VALUE || type == BLOCK_END) {
            parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY]);
            parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY_VALUE]);
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          } else {
            parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY]);
            parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY_VALUE]);
            parseStack.add(0, table[P_BLOCK_NODE_OR_INDENTLESS_SEQUENCE]);
            parseStack.add(0, table[P_PROPERTIES]);
          }
        } else if (type == VALUE) {
          parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY]);
          parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY_VALUE]);
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        }
        return null;
      }
    };
    table[P_BLOCK_MAPPING_ENTRY_VALUE] = new Production() {
      // Value half of a block mapping entry; an empty scalar when the value is omitted.
      public Event produce () {
        TokenType type = tokenizer.peekNextTokenType();
        if (type == VALUE) {
          tokenizer.getNextToken();
          type = tokenizer.peekNextTokenType();
          if (type == KEY || type == VALUE || type == BLOCK_END)
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          else {
            parseStack.add(0, table[P_BLOCK_NODE_OR_INDENTLESS_SEQUENCE]);
            parseStack.add(0, table[P_PROPERTIES]);
          }
        } else if (type == KEY) parseStack.add(0, table[P_EMPTY_SCALAR]);
        return null;
      }
    };
    table[P_BLOCK_NODE_OR_INDENTLESS_SEQUENCE] = new Production() {
      // A mapping value: alias, indentless sequence, or ordinary block content.
      public Event produce () {
        TokenType type = tokenizer.peekNextTokenType();
        if (type == ALIAS)
          parseStack.add(0, table[P_ALIAS]);
        else if (type == BLOCK_ENTRY) {
          parseStack.add(0, table[P_INDENTLESS_BLOCK_SEQUENCE]);
        } else {
          parseStack.add(0, table[P_BLOCK_CONTENT]);
        }
        return null;
      }
    };
    table[P_BLOCK_SEQUENCE_START] = new Production() {
      // Consumes the block sequence start token; implicit when untagged or tagged "!".
      public Event produce () {
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        tokenizer.getNextToken();
        return new SequenceStartEvent(anchors.get(0), tags.get(0), implicit, false);
      }
    };
    table[P_BLOCK_SEQUENCE_END] = new Production() {
      // Requires and consumes BLOCK_END, then emits SEQUENCE_END.
      public Event produce () {
        if (tokenizer.peekNextTokenType() != BLOCK_END)
          throw new ParserException("Expected a 'block end' but found: " + tokenizer.peekNextTokenType());
        tokenizer.getNextToken();
        return Event.SEQUENCE_END;
      }
    };
    table[P_BLOCK_MAPPING_START] = new Production() {
      // Consumes the block mapping start token and emits the mapping start event.
      public Event produce () {
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        tokenizer.getNextToken();
        return new MappingStartEvent(anchors.get(0), tags.get(0), implicit, false);
      }
    };
    table[P_BLOCK_MAPPING_END] = new Production() {
      // Requires and consumes BLOCK_END, then emits MAPPING_END.
      public Event produce () {
        if (tokenizer.peekNextTokenType() != BLOCK_END)
          throw new ParserException("Expected a 'block end' but found: " + tokenizer.peekNextTokenType());
        tokenizer.getNextToken();
        return Event.MAPPING_END;
      }
    };
    table[P_INDENTLESS_BLOCK_SEQUENCE] = new Production() {
      // Schedules start/entry/end for a sequence with no extra indentation.
      public Event produce () {
        parseStack.add(0, table[P_BLOCK_INDENTLESS_SEQUENCE_END]);
        parseStack.add(0, table[P_INDENTLESS_BLOCK_SEQUENCE_ENTRY]);
        parseStack.add(0, table[P_BLOCK_INDENTLESS_SEQUENCE_START]);
        return null;
      }
    };
    table[P_BLOCK_INDENTLESS_SEQUENCE_START] = new Production() {
      // Emits the start event without consuming a token: no explicit start marker exists.
      public Event produce () {
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        return new SequenceStartEvent(anchors.get(0), tags.get(0), implicit, false);
      }
    };
    table[P_INDENTLESS_BLOCK_SEQUENCE_ENTRY] = new Production() {
      // One "- " entry; KEY/VALUE also terminate an entry because the sequence shares
      // its indentation with the enclosing mapping.
      public Event produce () {
        if (tokenizer.peekNextTokenType() == BLOCK_ENTRY) {
          tokenizer.getNextToken();
          TokenType type = tokenizer.peekNextTokenType();
          if (type == BLOCK_ENTRY || type == KEY || type == VALUE || type == BLOCK_END) {
            parseStack.add(0, table[P_INDENTLESS_BLOCK_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          } else {
            parseStack.add(0, table[P_INDENTLESS_BLOCK_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_BLOCK_NODE]);
          }
        }
        return null;
      }
    };
    table[P_BLOCK_INDENTLESS_SEQUENCE_END] = new Production() {
      // No BLOCK_END token exists for an indentless sequence; just emit the end event.
      public Event produce () {
        return Event.SEQUENCE_END;
      }
    };
    table[P_FLOW_SEQUENCE_START] = new Production() {
      // Consumes the flow sequence start token and emits the start event.
      public Event produce () {
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        tokenizer.getNextToken();
        return new SequenceStartEvent(anchors.get(0), tags.get(0), implicit, true);
      }
    };
    table[P_FLOW_SEQUENCE_ENTRY] = new Production() {
      // One flow sequence entry; a KEY token presumably marks a single-pair mapping
      // inside the sequence, otherwise a plain flow node is parsed.
      public Event produce () {
        if (tokenizer.peekNextTokenType() != FLOW_SEQUENCE_END) {
          if (tokenizer.peekNextTokenType() == KEY) {
            parseStack.add(0, table[P_FLOW_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_FLOW_ENTRY_MARKER]);
            parseStack.add(0, table[P_FLOW_INTERNAL_MAPPING_END]);
            parseStack.add(0, table[P_FLOW_INTERNAL_VALUE]);
            parseStack.add(0, table[P_FLOW_INTERNAL_CONTENT]);
            parseStack.add(0, table[P_FLOW_INTERNAL_MAPPING_START]);
          } else {
            parseStack.add(0, table[P_FLOW_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_FLOW_NODE]);
            parseStack.add(0, table[P_FLOW_ENTRY_MARKER]);
          }
        }
        return null;
      }
    };
    table[P_FLOW_SEQUENCE_END] = new Production() {
      // Consumes the closing token and emits SEQUENCE_END.
      public Event produce () {
        tokenizer.getNextToken();
        return Event.SEQUENCE_END;
      }
    };
    table[P_FLOW_MAPPING_START] = new Production() {
      // Consumes the flow mapping start token and emits the start event.
      public Event produce () {
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        tokenizer.getNextToken();
        return new MappingStartEvent(anchors.get(0), tags.get(0), implicit, true);
      }
    };
    table[P_FLOW_MAPPING_ENTRY] = new Production() {
      // One key/value pair of a flow mapping; bare entries become plain flow nodes.
      public Event produce () {
        if (tokenizer.peekNextTokenType() != FLOW_MAPPING_END) {
          if (tokenizer.peekNextTokenType() == KEY) {
            parseStack.add(0, table[P_FLOW_MAPPING_ENTRY]);
            parseStack.add(0, table[P_FLOW_ENTRY_MARKER]);
            parseStack.add(0, table[P_FLOW_MAPPING_INTERNAL_VALUE]);
            parseStack.add(0, table[P_FLOW_MAPPING_INTERNAL_CONTENT]);
          } else {
            parseStack.add(0, table[P_FLOW_MAPPING_ENTRY]);
            parseStack.add(0, table[P_FLOW_NODE]);
            parseStack.add(0, table[P_FLOW_ENTRY_MARKER]);
          }
        }
        return null;
      }
    };
    table[P_FLOW_MAPPING_END] = new Production() {
      // Consumes the closing token and emits MAPPING_END.
      public Event produce () {
        tokenizer.getNextToken();
        return Event.MAPPING_END;
      }
    };
    table[P_FLOW_INTERNAL_MAPPING_START] = new Production() {
      // Starts the implicit single-pair mapping for "key: value" in a flow sequence.
      public Event produce () {
        tokenizer.getNextToken();
        return new MappingStartEvent(null, null, true, true);
      }
    };
    table[P_FLOW_INTERNAL_CONTENT] = new Production() {
      // Key of the internal single-pair mapping; empty scalar when omitted. Consumes no token.
      public Event produce () {
        TokenType type = tokenizer.peekNextTokenType();
        if (type == VALUE || type == FLOW_ENTRY || type == FLOW_SEQUENCE_END)
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        else
          parseStack.add(0, table[P_FLOW_NODE]);
        return null;
      }
    };
    table[P_FLOW_INTERNAL_VALUE] = new Production() {
      // Value of the internal single-pair mapping; empty scalar when ':' or the value is omitted.
      public Event produce () {
        if (tokenizer.peekNextTokenType() == VALUE) {
          tokenizer.getNextToken();
          if (tokenizer.peekNextTokenType() == FLOW_ENTRY || tokenizer.peekNextTokenType() == FLOW_SEQUENCE_END)
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          else
            parseStack.add(0, table[P_FLOW_NODE]);
        } else
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        return null;
      }
    };
    table[P_FLOW_INTERNAL_MAPPING_END] = new Production() {
      // No closing token for the implicit mapping; just emit MAPPING_END.
      public Event produce () {
        return Event.MAPPING_END;
      }
    };
    table[P_FLOW_ENTRY_MARKER] = new Production() {
      // Optionally consumes a FLOW_ENTRY (',') separating flow entries.
      public Event produce () {
        if (tokenizer.peekNextTokenType() == FLOW_ENTRY) tokenizer.getNextToken();
        return null;
      }
    };
    table[P_FLOW_NODE] = new Production() {
      // A node in flow context: either an alias, or properties followed by content.
      public Event produce () {
        if (tokenizer.peekNextTokenType() == ALIAS)
          parseStack.add(0, table[P_ALIAS]);
        else {
          parseStack.add(0, table[P_PROPERTIES_END]);
          parseStack.add(0, table[P_FLOW_CONTENT]);
          parseStack.add(0, table[P_PROPERTIES]);
        }
        return null;
      }
    };
    table[P_FLOW_MAPPING_INTERNAL_CONTENT] = new Production() {
      // Key of a flow mapping pair; the pending token (presumably KEY) is consumed in
      // the else branch before the key node is parsed.
      public Event produce () {
        TokenType type = tokenizer.peekNextTokenType();
        if (type == VALUE || type == FLOW_ENTRY || type == FLOW_MAPPING_END)
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        else {
          tokenizer.getNextToken();
          parseStack.add(0, table[P_FLOW_NODE]);
        }
        return null;
      }
    };
    table[P_FLOW_MAPPING_INTERNAL_VALUE] = new Production() {
      // Value of a flow mapping pair; empty scalar when ':' or the value is missing.
      public Event produce () {
        if (tokenizer.peekNextTokenType() == VALUE) {
          tokenizer.getNextToken();
          if (tokenizer.peekNextTokenType() == FLOW_ENTRY || tokenizer.peekNextTokenType() == FLOW_MAPPING_END)
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          else
            parseStack.add(0, table[P_FLOW_NODE]);
        } else
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        return null;
      }
    };
    table[P_ALIAS] = new Production() {
      // Consumes an alias token and emits the corresponding AliasEvent.
      public Event produce () {
        AliasToken token = (AliasToken)tokenizer.getNextToken();
        return new AliasEvent(token.getInstanceName());
      }
    };
    table[P_EMPTY_SCALAR] = new Production() {
      public Event produce () {
        return new ScalarEvent(null, null, new boolean[] {true, false}, "", (char)0);
View Full Code Here

Examples of org.yaml.snakeyaml.tokens.ScalarToken

        Mark dummy = new Mark("dummy", 0, 0, 0, "", 0);
        LinkedList<Token> etalonTokens = new LinkedList<Token>();
        etalonTokens.add(new StreamStartToken(dummy, dummy));
        etalonTokens.add(new BlockMappingStartToken(dummy, dummy));
        etalonTokens.add(new KeyToken(dummy, dummy));
        etalonTokens.add(new ScalarToken("string", true, dummy, dummy, (char) 0));
        etalonTokens.add(new ValueToken(dummy, dummy));
        etalonTokens.add(new ScalarToken("abcd", true, dummy, dummy, (char) 0));
        etalonTokens.add(new BlockEndToken(dummy, dummy));
        etalonTokens.add(new StreamEndToken(dummy, dummy));
        while (!etalonTokens.isEmpty() && scanner.checkToken(etalonTokens.get(0).getTokenId())) {
            assertEquals(etalonTokens.removeFirst(), scanner.getToken());
        }
View Full Code Here

Examples of org.yaml.snakeyaml.tokens.ScalarToken

                index++;
            }
        }
        chunks.append(data.substring(start, index));
        index++;
        return new ScalarToken(chunks.toString(), mark, mark, false);
    }
View Full Code Here

Examples of org.yaml.snakeyaml.tokens.ScalarToken

            if (scanner.checkToken(Token.ID.Tag)) {
                TagToken token = (TagToken) scanner.getToken();
                tag = token.getValue().getHandle() + token.getValue().getSuffix();
            }
            if (scanner.checkToken(Token.ID.Scalar)) {
                ScalarToken token = (ScalarToken) scanner.getToken();
                events.add(new ScalarEvent(anchor, tag, new ImplicitTuple(false, false), token
                        .getValue(), null, null, null));
            } else if (scanner.checkToken(Token.ID.FlowSequenceStart)) {
                events.add(new SequenceStartEvent(anchor, tag, false, null, null, null));
                parseSequence();
            } else if (scanner.checkToken(Token.ID.FlowMappingStart)) {
View Full Code Here

Examples of org.yaml.snakeyaml.tokens.ScalarToken

                event = new SequenceStartEvent(anchor, tag, implicit, startMark, endMark,
                        Boolean.FALSE);
                state = new ParseIndentlessSequenceEntry();
            } else {
                if (scanner.checkToken(Token.ID.Scalar)) {
                    ScalarToken token = (ScalarToken) scanner.getToken();
                    endMark = token.getEndMark();
                    ImplicitTuple implicitValues;
                    if ((token.getPlain() && tag == null) || "!".equals(tag)) {
                        implicitValues = new ImplicitTuple(true, false);
                    } else if (tag == null) {
                        implicitValues = new ImplicitTuple(false, true);
                    } else {
                        implicitValues = new ImplicitTuple(false, false);
                    }
                    event = new ScalarEvent(anchor, tag, implicitValues, token.getValue(),
                            startMark, endMark, token.getStyle());
                    state = states.pop();
                } else if (scanner.checkToken(Token.ID.FlowSequenceStart)) {
                    endMark = scanner.peekToken().getEndMark();
                    event = new SequenceStartEvent(anchor, tag, implicit, startMark, endMark,
                            Boolean.TRUE);
                    state = new ParseFlowSequenceFirstEntry();
                } else if (scanner.checkToken(Token.ID.FlowMappingStart)) {
                    endMark = scanner.peekToken().getEndMark();
                    event = new MappingStartEvent(anchor, tag, implicit, startMark, endMark,
                            Boolean.TRUE);
                    state = new ParseFlowMappingFirstKey();
                } else if (block && scanner.checkToken(Token.ID.BlockSequenceStart)) {
                    endMark = scanner.peekToken().getStartMark();
                    event = new SequenceStartEvent(anchor, tag, implicit, startMark, endMark,
                            Boolean.FALSE);
                    state = new ParseBlockSequenceFirstEntry();
                } else if (block && scanner.checkToken(Token.ID.BlockMappingStart)) {
                    endMark = scanner.peekToken().getStartMark();
                    event = new MappingStartEvent(anchor, tag, implicit, startMark, endMark,
                            Boolean.FALSE);
                    state = new ParseBlockMappingFirstKey();
                } else if (anchor != null || tag != null) {
                    // Empty scalars are allowed even if a tag or an anchor is
                    // specified.
                    event = new ScalarEvent(anchor, tag, new ImplicitTuple(implicit, false), "",
                            startMark, endMark, (char) 0);
                    state = states.pop();
                } else {
                    String node;
                    if (block) {
                        node = "block";
                    } else {
                        node = "flow";
                    }
                    Token token = scanner.peekToken();
                    throw new ParserException("while parsing a " + node + " node", startMark,
                            "expected the node content, but found " + token.getTokenId(),
                            token.getStartMark());
                }
            }
        }
        return event;
    }
View Full Code Here

Examples of org.yaml.snakeyaml.tokens.ScalarToken

        }
        if (chompi.chompTailIsTrue()) {
            chunks.append(breaks);
        }
        // We are done.
        return new ScalarToken(chunks.toString(), false, startMark, endMark, style);
    }
View Full Code Here

Examples of org.yaml.snakeyaml.tokens.ScalarToken

            chunks.append(scanFlowScalarSpaces(startMark));
            chunks.append(scanFlowScalarNonSpaces(_double, startMark));
        }
        reader.forward();
        Mark endMark = reader.getMark();
        return new ScalarToken(chunks.toString(), false, startMark, endMark, style);
    }
View Full Code Here

Examples of org.yaml.snakeyaml.tokens.ScalarToken

            if (spaces.length() == 0 || reader.peek() == '#'
                    || (this.flowLevel == 0 && this.reader.getColumn() < indent)) {
                break;
            }
        }
        return new ScalarToken(chunks.toString(), startMark, endMark, true);
    }
View Full Code Here

Examples of ptolemy.data.ScalarToken

     * @exception IllegalActionException If there is no director.
     */
    public void fire() throws IllegalActionException  {
        super.fire();
        if (_nextPort.hasToken(0)) {
            ScalarToken readToken = (ScalarToken)_nextPort.get(0);
            if (_debugging) {
                _debug("Read input token from " + _nextPort.getName()+" with value "+readToken);
            }
            if (_recordedToken == null) {
                $ASSIGN$_tentativeRecordedToken(readToken);
                $ASSIGN$_tentativeReadFromA(true);
                $ASSIGN$_tentativeNextPort(inputB);
            } else {
                if ((readToken.isLessThan(_recordedToken)).booleanValue()) {
                    output.send(0, readToken);
                    if (_debugging) {
                        _debug("Sent output token with value " + readToken);
                    }
                    if (_nextPort == inputA) {
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.