Examples of Token


Examples of cc.mallet.types.Token

        // Advance the lexer across the text, recording each token's absolute
        // character span and pairing it with the current tag.
        while (lexer.hasNext()) {
          lexer.next();
          int tokStart = textStart + lexer.getStartOffset();
          int tokEnd = textStart + lexer.getEndOffset();
          dataTokens.add(new StringSpan(string, tokStart, tokEnd));
          targetTokens.add(new Token(tag));
        }
      }
      textStart = nextStart;
      tag = nextTag;
    }
View Full Code Here
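
For context, a minimal self-contained sketch of how MALLET's lexer and Token fit together outside the excerpt. It assumes cc.mallet.util.CharSequenceLexer with a (CharSequence, Pattern) constructor and TokenSequence as the Token container; treat the exact signatures as approximate.

import java.util.regex.Pattern;
import cc.mallet.types.Token;
import cc.mallet.types.TokenSequence;
import cc.mallet.util.CharSequenceLexer;

public class MalletTokenDemo {
    public static void main(String[] args) {
        String text = "MALLET turns raw text into Token sequences.";
        // CharSequenceLexer iterates over regex matches and exposes their offsets.
        CharSequenceLexer lexer = new CharSequenceLexer(text, Pattern.compile("\\w+"));
        TokenSequence tokens = new TokenSequence();
        while (lexer.hasNext()) {
            lexer.next();
            // Build each Token from the matched span, as in the excerpt above.
            tokens.add(new Token(text.substring(lexer.getStartOffset(), lexer.getEndOffset())));
        }
        System.out.println(tokens);
    }
}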

Examples of com.Ostermiller.Syntax.Lexer.Token

        if (colorStartPos != -1)
        {
          try
          {
            final SchemaInfo si = _session.getSchemaInfo();
            Token t;
            synchronized (doclock)
            {
              // We are playing some games with the lexer for efficiency.
              // We could just create a new lexer each time here, but instead
              // we reset it so that it thinks it is starting at the beginning
              // of the document while reporting offsets shifted by colorStartPos.
              // Resetting the lexer causes the close() method on the reader
              // to be called, but because close() has no effect on the
              // DocumentReader, we can do this.
              syntaxLexer.reset(
                documentReader,
                0,
                colorStartPos,
                0);
              // After the lexer has been set up, scroll the reader so that it
              // is in the correct spot as well.
              documentReader.seek(colorStartPos);
              // We will highlight tokens until we reach a good stopping place.
              // The first obvious stopping place is the end of the document:
              // the lexer returns null at the end of the document, and we
              // need to stop there.
              t = getNextToken();
            }
            SimpleAttributeSet errStyle = getMyStyle(IConstants.IStyleNames.ERROR);
            ErrorInfo[] errInfoClone = _currentErrorInfos.toArray(new ErrorInfo[0]);
            while (t != null && t.getCharEnd() <= colorStartPos + colorLen + 1)
            {
              // this is the actual command that colors the stuff.
              // Color stuff with the description of the style matched
              // to the hash table that has been set up ahead of time.
              synchronized (doclock)
              {
                if (t.getCharEnd() <= document.getLength())
                {
                  String type = t.getDescription();
                  if (type.equals(IConstants.IStyleNames.IDENTIFIER))
                  {
                    final String data = t.getContents();
                    if (si.isTable(data))
                    {
                      type = IConstants.IStyleNames.TABLE;
                      if (fireTableOrViewFoundEvent)
                      {
                        fireTableOrViewFound(t.getContents());
                      }

                      String upperCaseTableName = data.toUpperCase();
                      if (!_knownTables.contains(upperCaseTableName))
                      {
                        _knownTables.put(upperCaseTableName, upperCaseTableName);
                        recolorColumns(upperCaseTableName);
                      }

                    }
                    else if (si.isColumn(data))
                    {
                      type = IConstants.IStyleNames.COLUMN;
                    }
                    else if (si.isDataType(data))
                    {
                      type = IConstants.IStyleNames.DATA_TYPE;
                    }
                    else if (si.isKeyword(data))
                    {
                      type = IConstants.IStyleNames.RESERVED_WORD;
                    }
                  }

                  int begin = t.getCharBegin();
                  int len = t.getCharEnd() - t.getCharBegin();

                  SimpleAttributeSet myStyle = null;
                  for (int i = 0; i < errInfoClone.length; i++)
                  {
                    if (isBetween(errInfoClone[i].beginPos, errInfoClone[i].endPos, begin)
View Full Code Here
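
The excerpt above drives a document highlighter from Ostermiller lexer tokens. A minimal sketch of the same token loop in isolation; it assumes the package's SQLLexer class and its Lexer interface with getNextToken(), while getDescription()/getCharBegin()/getCharEnd()/getContents() are taken directly from the excerpt.

import java.io.IOException;
import java.io.StringReader;
import com.Ostermiller.Syntax.Lexer.Lexer;
import com.Ostermiller.Syntax.Lexer.SQLLexer;
import com.Ostermiller.Syntax.Lexer.Token;

public class SyntaxTokenDemo {
    public static void main(String[] args) throws IOException {
        Lexer lexer = new SQLLexer(new StringReader("SELECT id FROM users"));
        // getNextToken() returns null at end of input, which is what the
        // highlighter loop above relies on to stop.
        for (Token t = lexer.getNextToken(); t != null; t = lexer.getNextToken()) {
            System.out.printf("%-12s [%d,%d) %s%n",
                    t.getDescription(), t.getCharBegin(), t.getCharEnd(), t.getContents());
        }
    }
}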

Examples of com.alibaba.druid.sql.parser.Token

      // ("setIfNotExiists" is spelled this way in Druid's own API.)
      stmt.setIfNotExiists(true);
    }

    stmt.setName(this.exprParser.name());

    Token start = null;
    if (lexer.token() == Token.LPAREN) {
      start = Token.LPAREN;
    } else if (lexer.token() == Token.LBRACE) {
      start = Token.LBRACE;
    }
View Full Code Here
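
For reference, a minimal sketch of driving Druid's lexer directly. It assumes the public Lexer(String) constructor and the nextToken()/token() pair shown in the excerpt; Token here is Druid's enum of SQL token kinds.

import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.Token;

public class DruidLexerDemo {
    public static void main(String[] args) {
        Lexer lexer = new Lexer("CREATE TABLE t (id INT)");
        // nextToken() advances the lexer; token() reports the current token kind.
        for (lexer.nextToken(); lexer.token() != Token.EOF; lexer.nextToken()) {
            System.out.println(lexer.token());
        }
    }
}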

Examples of com.antlersoft.parser.Token

    // If the trailing member holds an end-of-input or ";" token, drop it.
    if ( size>0)
    {
      Member o=m_contents.get( size-1);
      if ( o instanceof TokenHolder)
      {
        Token t=((TokenHolder)o).m_token;
        if ( t.symbol==Parser._end_ || t.symbol==scope.findReserved(";"))
          m_contents.remove( size-1);
      }
    }
  }
View Full Code Here

Examples of com.atilika.kuromoji.Token

        String input = "シロクロ";
        String[] surfaceForms = {"シロ", "クロ"};
        List<Token> tokens = tokenizer.tokenize(input);

        assertEquals(surfaceForms.length, tokens.size());
        Token token = tokens.get(1);
        String actual = token.getSurfaceForm() + "\t" + token.getAllFeatures();
        assertEquals("クロ\tカスタム名詞,*,*,*,*,*,*,クロ,*", actual);
    }
View Full Code Here
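
The test above exercises a user-dictionary entry. A minimal sketch of the same Kuromoji API with the default dictionary; Tokenizer.builder().build() and tokenize(String) are assumed from the pre-1.0 com.atilika.kuromoji API used here.

import java.util.List;
import com.atilika.kuromoji.Token;
import com.atilika.kuromoji.Tokenizer;

public class KuromojiDemo {
    public static void main(String[] args) {
        Tokenizer tokenizer = Tokenizer.builder().build();
        List<Token> tokens = tokenizer.tokenize("シロクロ");
        for (Token token : tokens) {
            // Surface form is the text as segmented; getAllFeatures() returns the
            // comma-separated morphological features from the dictionary.
            System.out.println(token.getSurfaceForm() + "\t" + token.getAllFeatures());
        }
    }
}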

Examples of com.atlassian.connect.play.java.token.Token

        }
    }

    public static void refreshToken(boolean allowInsecurePolling)
    {
        final Token token = new Token(AC.getAcHost().getKey(), AC.getUser(), System.currentTimeMillis(), allowInsecurePolling);

        // Serialize the token to JSON, Base64-encode it, then encrypt it
        // before stashing it on the request context.
        final String jsonToken = Base64.encodeBase64String(token.toJson().toString().getBytes());
        final String encryptedToken = Crypto.encryptAES(jsonToken);

        getHttpContext().args.put(AC_TOKEN, encryptedToken);
    }
View Full Code Here

Examples of com.creativewidgetworks.goldparser.engine.Token

     * grammars where indentation is important (e.g. Python, etc.)
     * @return Token
     */
    @Override
    protected Token nextToken() {
        Token token;

        // Indent virtual terminals
        if (useIndentVirtualTerminals) {
            if (ivtTokens.isEmpty()) {
                // Get next token from stream
                token = produceToken();
               
                // Token's position in source - initialize indentLevel stack
                Position position = token.getPosition();
                if (ivtIndentLevels.isEmpty()) {
                    ivtLine = position.getLine();
                    ivtIndentLevels.push(position);
                }
               
                // Trigger on change of line number
                if (position.getLine() != ivtLine) {
                    ivtLine = position.getLine();
                    int ivtColumn = ivtIndentLevels.peek().getColumn();
                    if (position.getColumn() > ivtColumn) {
                        ivtTokens.push(token);
                        ivtIndentLevels.push(position);
                        token = new Token(getSymbolByName(VT_INDENT_INCREASE), getIndentLevels(), position);
                    } else if (token.getPosition().getColumn() < ivtColumn) {
                        ivtTokens.push(token);
                        while (!ivtIndentLevels.isEmpty() && ivtIndentLevels.peek().getColumn() > position.getColumn()) {
                            ivtIndentLevels.pop();
                            ivtTokens.push(new Token(getSymbolByName(VT_INDENT_DECREASE), getIndentLevels(), position));
                        }
                        token = ivtTokens.pop();
                    }
                }
               
View Full Code Here
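
The excerpt synthesizes Python-style INDENT/DEDENT virtual terminals from column positions. Below is the same stack technique reduced to a self-contained sketch, independent of the GOLD parser API (all names in it are illustrative, not from the library).

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;

public class IndentTokens {
    // Emit one INDENT when a line starts deeper than the stack top, and one
    // DEDENT per popped level when it starts shallower, mirroring the excerpt.
    public static List<String> tokenize(String[] lines) {
        Deque<Integer> levels = new ArrayDeque<>();
        levels.push(0);
        List<String> out = new ArrayList<>();
        for (String line : lines) {
            int col = line.length() - line.stripLeading().length();
            if (col > levels.peek()) {
                levels.push(col);
                out.add("INDENT");
            }
            while (col < levels.peek()) {
                levels.pop();
                out.add("DEDENT");
            }
            out.add(line.trim());
        }
        return out;
    }

    public static void main(String[] args) {
        // Prints: [a:, INDENT, b, c, DEDENT, d]
        System.out.println(tokenize(new String[] {"a:", "  b", "  c", "d"}));
    }
}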

Examples of com.drighetto.essai.bouncycastle.asn1.Token

   *         11 March 2007<br>
   * @param args
   */
  @SuppressWarnings("boxing")
  public static void main(String[] args) {
    Token token1 = null;
    Token token2 = null;
    byte[] userId = "Dominique_Dominique".getBytes();
    byte[] tempEncrypt = null;
    byte[] tempDecrypt = null;
    byte[] msgToCrypt = null;
    byte[] tempEncodedBase64 = null;
    byte[] tempDecodedBase64 = null;
    byte[] tempBytes = null;
    byte[] myBytes = new byte[15];
    Date creationDate = new Date();
    ASN1Sequence sequenceASN1 = null;
    ASN1InputStream inASN1 = null;
    SymmetricKeyCipher symmetricKeyCipher = null;
    int i = 0;
    int myInt = 30;

    /*
     * Sample: create an ASN.1 structure, encrypt and decrypt it with a
     * symmetric-key algorithm (checking equality before and after), then
     * encode and decode it in Base64 (again checking equality).
     */
    System.out.println("---[Bouncy Castle Sample]---");
    try {
      /* ASN1 Sample */
      myBytes[0] = 1;
      myBytes[1] = 2;
      myBytes[2] = 4;
      myBytes[8] = -3;
      System.out.println("[ASN1 Sample]");
      System.out
          .println("**Sample 1 : Constructor with direct Java object**");
      token1 = new Token(userId, myBytes, myInt, creationDate);
      inASN1 = new ASN1InputStream(token1.getEncoded());
      System.out.println("DumpAsString on Java Object");
      System.out.println(ASN1Dump.dumpAsString(token1));
      System.out.println("DumpAsString on Object from ASN1InputStream");
      System.out.println(ASN1Dump.dumpAsString(inASN1.readObject()));
      // -
      System.out.println("**Sample 2 : Conctructor with ASN1 sequence**");
      sequenceASN1 = ASN1Sequence.getInstance(token1.toASN1Object());
      token2 = new Token(sequenceASN1);
      inASN1 = new ASN1InputStream(token2.getEncoded());
      System.out.println("DumpAsString on Java Object");
      System.out.println(ASN1Dump.dumpAsString(token2));
      System.out.println("DumpAsString on Object from ASN1InputStream");
      System.out.println(ASN1Dump.dumpAsString(inASN1.readObject()));
      System.out.println("--");
      System.out.println("------");
      System.out.println("--\n");
      /* $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ */
      /* Cipher Sample */
      System.out.println("[Cipher Sample]");
      symmetricKeyCipher = new SymmetricKeyCipher();
      token1 = new Token(userId, myBytes, myInt, creationDate);
      msgToCrypt = token1.getEncoded();
      System.out.println("Token As String before encryption : ");
      System.out.println(ASN1Dump.dumpAsString(token1));
      System.out.println("**Sample 1 : Encryption**");
      System.out.println("Original bytes : ");
      System.out.print("[");
      for (i = 0; i < msgToCrypt.length; i++) {
        System.out.print(msgToCrypt[i]);
        System.out.print(";");
      }
      System.out.println("]");
      tempEncrypt = symmetricKeyCipher.encrypt(msgToCrypt);
      System.out.println("Encrypted bytes : ");
      System.out.print("[");
      for (i = 0; i < tempEncrypt.length; i++) {
        System.out.print(tempEncrypt[i]);
        System.out.print(";");
      }
      System.out.println("]");
      System.out.println("Encrypted bytes in HEX :");
      System.out.println(new String(Hex.encode(tempEncrypt)));
      System.out.println("**Sample 2 : Decryption**");
      tempDecrypt = symmetricKeyCipher.decrypt(tempEncrypt);
      System.out.println("Decrypted bytes : ");
      System.out.print("[");
      for (i = 0; i < tempDecrypt.length; i++) {
        System.out.print(tempDecrypt[i]);
        System.out.print(";");
      }
      System.out.println("]\n");
      System.out.println("Token As String after decryption : ");
      sequenceASN1 = ASN1Sequence.getInstance(ASN1Object
          .fromByteArray(tempDecrypt));
      token2 = new Token(sequenceASN1);
      System.out.println(ASN1Dump.dumpAsString(token2));
      if (ASN1Dump.dumpAsString(token1).equals(
          ASN1Dump.dumpAsString(token2))) {
        System.out
            .println("OK - Token is equal before and after encryption/decryption !");
      } else {
        System.err
            .println("KO - Token isn't equal before and after encryption/decryption !");
      }
      System.out.println("\n--");
      System.out.println("------");
      System.out.println("--\n");
      /* $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ */
      /* Base64 Sample */
      System.out.println("[Base64 Sample]");
      tempBytes = token2.getEncoded();
      System.out.println("Original bytes : ");
      System.out.print("[");
      for (i = 0; i < tempBytes.length; i++) {
        System.out.print(tempBytes[i]);
        System.out.print(";");
      }
      System.out.println("]");
      tempEncodedBase64 = Base64.encode(tempBytes);
      System.out.println("Base 64 encoded bytes : ");
      System.out.print("[");
      for (i = 0; i < tempEncodedBase64.length; i++) {
        System.out.print(tempEncodedBase64[i]);
        System.out.print(";");
      }
      System.out.println("]");
      tempDecodedBase64 = Base64.decode(tempEncodedBase64);
      System.out.println("Base 64 decoded bytes : ");
      System.out.print("[");
      for (i = 0; i < tempDecodedBase64.length; i++) {
        System.out.print(tempDecodedBase64[i]);
        System.out.print(";");
      }
      System.out.println("]");
      System.out.println("Token As String after decoding : ");
      sequenceASN1 = ASN1Sequence.getInstance(ASN1Object
          .fromByteArray(tempDecodedBase64));
      token2 = new Token(sequenceASN1);
      System.out.println(ASN1Dump.dumpAsString(token2));
      System.out.println("\n--");
      System.out.println("------");
      System.out.println("--\n");
      /* $$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$ */
      /* DER Objects Access */
      System.out.printf("USER ID          : {%s}\n", new String(token2
          .getUserId().getOctets()));
      System.out.printf("DER APP SPEC[%s] : {", token2.getDerAppSpec()
          .getContents().length);
      for (byte b : token2.getDerAppSpec().getContents()) {
        System.out.printf(" %s ", b);
      }
      System.out.println("}");
      System.out.printf("DER INTEGER      : {%s}\n", token2
          .getDerInteger().getValue());
      System.out.printf("CREATION DATE    : {%s}\n", token2
          .getCreationDate().getDate());
      System.out.println("\n\n");
      System.out.println(".:ALL IS OK:.");
    } catch (Exception exp) {
      exp.printStackTrace();
View Full Code Here
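
The long walkthrough above boils down to two codec round-trips. A minimal, runnable distillation, assuming the sample's Base64 and Hex are Bouncy Castle's org.bouncycastle.util.encoders classes:

import java.util.Arrays;
import org.bouncycastle.util.encoders.Base64;
import org.bouncycastle.util.encoders.Hex;

public class CodecRoundTrip {
    public static void main(String[] args) {
        byte[] original = "Dominique_Dominique".getBytes();
        // Base64.encode/decode operate on raw byte arrays and are inverses.
        byte[] encoded = Base64.encode(original);
        byte[] decoded = Base64.decode(encoded);
        System.out.println("base64 : " + new String(encoded));
        System.out.println("hex    : " + new String(Hex.encode(original)));
        System.out.println("intact : " + Arrays.equals(original, decoded));
    }
}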

Examples of com.dyuproject.oauth.Token

    }
   
    public void doPost(HttpServletRequest request, HttpServletResponse response)
    throws IOException, ServletException
    {
        Token token = _consumer.getToken(_localEndpoint.getConsumerKey(), request);
        // OAuth dance: an uninitialized token is exchanged for a request
        // token, which the user must authorize before the access-token fetch.
        switch(token.getState())
        {
            case Token.UNITIALIZED:
                UrlEncodedParameterMap params = new UrlEncodedParameterMap()
                    .add(Constants.OAUTH_CALLBACK, request.getRequestURL().toString());
                   
                Response r = _consumer.fetchToken(_localEndpoint, params, TokenExchange.REQUEST_TOKEN,
                        token);
                if(r.getStatus()==200 && token.getState()==Token.UNAUTHORIZED)
                {
                    // unauthorized request token
                    _consumer.saveToken(token, request, response);
                    StringBuilder urlBuffer = Transport.buildAuthUrl(
                            _localEndpoint.getAuthorizationUrl(), token);
                    response.sendRedirect(urlBuffer.toString());
                }
                break;
               
            case Token.UNAUTHORIZED:
                if(token.authorize(request.getParameter(Constants.OAUTH_TOKEN),
                        request.getParameter(Constants.OAUTH_VERIFIER)))
                {
                    if(fetchAccessToken(token, request, response))
                        queryLocalContacts(token, request, response);
                    else
View Full Code Here

Examples of com.esotericsoftware.yamlbeans.tokenizer.Token

        return null;
      }
    };
    table[P_DOCUMENT_START] = new Production() {
      public Event produce () {
        Token token = tokenizer.peekNextToken();
        DocumentStartEvent documentStartEvent = processDirectives(true);
        if (tokenizer.peekNextTokenType() != DOCUMENT_START)
          throw new ParserException("Expected 'document start' but found: " + token.type);
        tokenizer.getNextToken();
        return documentStartEvent;
      }
    };
    table[P_DOCUMENT_START_IMPLICIT] = new Production() {
      public Event produce () {
        return processDirectives(false);
      }
    };
    table[P_DOCUMENT_END] = new Production() {
      public Event produce () {
        boolean explicit = false;
        while (tokenizer.peekNextTokenType() == DOCUMENT_END) {
          tokenizer.getNextToken();
          explicit = true;
        }
        return explicit ? Event.DOCUMENT_END_TRUE : Event.DOCUMENT_END_FALSE;
      }
    };
    table[P_BLOCK_NODE] = new Production() {
      public Event produce () {
        TokenType type = tokenizer.peekNextTokenType();
        if (type == DIRECTIVE || type == DOCUMENT_START || type == DOCUMENT_END || type == STREAM_END)
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        else if (type == ALIAS)
          parseStack.add(0, table[P_ALIAS]);
        else {
          parseStack.add(0, table[P_PROPERTIES_END]);
          parseStack.add(0, table[P_BLOCK_CONTENT]);
          parseStack.add(0, table[P_PROPERTIES]);
        }
        return null;
      }
    };
    table[P_BLOCK_CONTENT] = new Production() {
      public Event produce () {
        TokenType type = tokenizer.peekNextTokenType();
        if (type == BLOCK_SEQUENCE_START)
          parseStack.add(0, table[P_BLOCK_SEQUENCE]);
        else if (type == BLOCK_MAPPING_START)
          parseStack.add(0, table[P_BLOCK_MAPPING]);
        else if (type == FLOW_SEQUENCE_START)
          parseStack.add(0, table[P_FLOW_SEQUENCE]);
        else if (type == FLOW_MAPPING_START)
          parseStack.add(0, table[P_FLOW_MAPPING]);
        else if (type == SCALAR)
          parseStack.add(0, table[P_SCALAR]);
        else
          throw new ParserException("Expected a sequence, mapping, or scalar but found: " + type);
        return null;
      }
    };
    table[P_PROPERTIES] = new Production() {
      public Event produce () {
        String anchor = null, tagHandle = null, tagSuffix = null;
        if (tokenizer.peekNextTokenType() == ANCHOR) {
          anchor = ((AnchorToken)tokenizer.getNextToken()).getInstanceName();
          if (tokenizer.peekNextTokenType() == TAG) {
            TagToken tagToken = (TagToken)tokenizer.getNextToken();
            tagHandle = tagToken.getHandle();
            tagSuffix = tagToken.getSuffix();
          }
        } else if (tokenizer.peekNextTokenType() == TAG) {
          TagToken tagToken = (TagToken)tokenizer.getNextToken();
          tagHandle = tagToken.getHandle();
          tagSuffix = tagToken.getSuffix();
          if (tokenizer.peekNextTokenType() == ANCHOR) anchor = ((AnchorToken)tokenizer.getNextToken()).getInstanceName();
        }
        String tag = null;
        if (tagHandle != null && !tagHandle.equals("!")) {
          if (!tagHandles.containsKey(tagHandle)) throw new ParserException("Undefined tag handle: " + tagHandle);
          tag = tagHandles.get(tagHandle) + tagSuffix;
        } else
          tag = tagSuffix;
        anchors.add(0, anchor);
        tags.add(0, tag);
        return null;
      }
    };
    table[P_PROPERTIES_END] = new Production() {
      public Event produce () {
        anchors.remove(0);
        tags.remove(0);
        return null;
      }
    };
    table[P_FLOW_CONTENT] = new Production() {
      public Event produce () {
        TokenType type = tokenizer.peekNextTokenType();
        if (type == FLOW_SEQUENCE_START)
          parseStack.add(0, table[P_FLOW_SEQUENCE]);
        else if (type == FLOW_MAPPING_START)
          parseStack.add(0, table[P_FLOW_MAPPING]);
        else if (type == SCALAR)
          parseStack.add(0, table[P_SCALAR]);
        else
          throw new ParserException("Expected a sequence, mapping, or scalar but found: " + type);
        return null;
      }
    };
    table[P_BLOCK_SEQUENCE] = new Production() {
      public Event produce () {
        parseStack.add(0, table[P_BLOCK_SEQUENCE_END]);
        parseStack.add(0, table[P_BLOCK_SEQUENCE_ENTRY]);
        parseStack.add(0, table[P_BLOCK_SEQUENCE_START]);
        return null;
      }
    };
    table[P_BLOCK_MAPPING] = new Production() {
      public Event produce () {
        parseStack.add(0, table[P_BLOCK_MAPPING_END]);
        parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY]);
        parseStack.add(0, table[P_BLOCK_MAPPING_START]);
        return null;
      }
    };
    table[P_FLOW_SEQUENCE] = new Production() {
      public Event produce () {
        parseStack.add(0, table[P_FLOW_SEQUENCE_END]);
        parseStack.add(0, table[P_FLOW_SEQUENCE_ENTRY]);
        parseStack.add(0, table[P_FLOW_SEQUENCE_START]);
        return null;
      }
    };
    table[P_FLOW_MAPPING] = new Production() {
      public Event produce () {
        parseStack.add(0, table[P_FLOW_MAPPING_END]);
        parseStack.add(0, table[P_FLOW_MAPPING_ENTRY]);
        parseStack.add(0, table[P_FLOW_MAPPING_START]);
        return null;
      }
    };
    table[P_SCALAR] = new Production() {
      public Event produce () {
        ScalarToken token = (ScalarToken)tokenizer.getNextToken();
        boolean[] implicit = null;
        if (token.getPlain() && tags.get(0) == null || "!".equals(tags.get(0)))
          implicit = new boolean[] {true, false};
        else if (tags.get(0) == null)
          implicit = new boolean[] {false, true};
        else
          implicit = new boolean[] {false, false};
        return new ScalarEvent(anchors.get(0), tags.get(0), implicit, token.getValue(), token.getStyle());
      }
    };
    table[P_BLOCK_SEQUENCE_ENTRY] = new Production() {
      public Event produce () {
        if (tokenizer.peekNextTokenType() == BLOCK_ENTRY) {
          tokenizer.getNextToken();
          TokenType type = tokenizer.peekNextTokenType();
          if (type == BLOCK_ENTRY || type == BLOCK_END) {
            parseStack.add(0, table[P_BLOCK_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          } else {
            parseStack.add(0, table[P_BLOCK_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_BLOCK_NODE]);
          }
        }
        return null;
      }
    };
    table[P_BLOCK_MAPPING_ENTRY] = new Production() {
      public Event produce () {
        TokenType type = tokenizer.peekNextTokenType();
        if (type == KEY) {
          tokenizer.getNextToken();
          type = tokenizer.peekNextTokenType();
          if (type == KEY || type == VALUE || type == BLOCK_END) {
            parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY]);
            parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY_VALUE]);
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          } else {
            parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY]);
            parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY_VALUE]);
            parseStack.add(0, table[P_BLOCK_NODE_OR_INDENTLESS_SEQUENCE]);
            parseStack.add(0, table[P_PROPERTIES]);
          }
        } else if (type == VALUE) {
          parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY]);
          parseStack.add(0, table[P_BLOCK_MAPPING_ENTRY_VALUE]);
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        }
        return null;
      }
    };
    table[P_BLOCK_MAPPING_ENTRY_VALUE] = new Production() {
      public Event produce () {
        TokenType type = tokenizer.peekNextTokenType();
        if (type == VALUE) {
          tokenizer.getNextToken();
          type = tokenizer.peekNextTokenType();
          if (type == KEY || type == VALUE || type == BLOCK_END)
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          else {
            parseStack.add(0, table[P_BLOCK_NODE_OR_INDENTLESS_SEQUENCE]);
            parseStack.add(0, table[P_PROPERTIES]);
          }
        } else if (type == KEY) parseStack.add(0, table[P_EMPTY_SCALAR]);
        return null;
      }
    };
    table[P_BLOCK_NODE_OR_INDENTLESS_SEQUENCE] = new Production() {
      public Event produce () {
        TokenType type = tokenizer.peekNextTokenType();
        if (type == ALIAS)
          parseStack.add(0, table[P_ALIAS]);
        else if (type == BLOCK_ENTRY) {
          parseStack.add(0, table[P_INDENTLESS_BLOCK_SEQUENCE]);
        } else {
          parseStack.add(0, table[P_BLOCK_CONTENT]);
        }
        return null;
      }
    };
    table[P_BLOCK_SEQUENCE_START] = new Production() {
      public Event produce () {
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        tokenizer.getNextToken();
        return new SequenceStartEvent(anchors.get(0), tags.get(0), implicit, false);
      }
    };
    table[P_BLOCK_SEQUENCE_END] = new Production() {
      public Event produce () {
        if (tokenizer.peekNextTokenType() != BLOCK_END)
          throw new ParserException("Expected a 'block end' but found: " + tokenizer.peekNextTokenType());
        tokenizer.getNextToken();
        return Event.SEQUENCE_END;
      }
    };
    table[P_BLOCK_MAPPING_START] = new Production() {
      public Event produce () {
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        tokenizer.getNextToken();
        return new MappingStartEvent(anchors.get(0), tags.get(0), implicit, false);
      }
    };
    table[P_BLOCK_MAPPING_END] = new Production() {
      public Event produce () {
        if (tokenizer.peekNextTokenType() != BLOCK_END)
          throw new ParserException("Expected a 'block end' but found: " + tokenizer.peekNextTokenType());
        tokenizer.getNextToken();
        return Event.MAPPING_END;
      }
    };
    table[P_INDENTLESS_BLOCK_SEQUENCE] = new Production() {
      public Event produce () {
        parseStack.add(0, table[P_BLOCK_INDENTLESS_SEQUENCE_END]);
        parseStack.add(0, table[P_INDENTLESS_BLOCK_SEQUENCE_ENTRY]);
        parseStack.add(0, table[P_BLOCK_INDENTLESS_SEQUENCE_START]);
        return null;
      }
    };
    table[P_BLOCK_INDENTLESS_SEQUENCE_START] = new Production() {
      public Event produce () {
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        return new SequenceStartEvent(anchors.get(0), tags.get(0), implicit, false);
      }
    };
    table[P_INDENTLESS_BLOCK_SEQUENCE_ENTRY] = new Production() {
      public Event produce () {
        if (tokenizer.peekNextTokenType() == BLOCK_ENTRY) {
          tokenizer.getNextToken();
          TokenType type = tokenizer.peekNextTokenType();
          if (type == BLOCK_ENTRY || type == KEY || type == VALUE || type == BLOCK_END) {
            parseStack.add(0, table[P_INDENTLESS_BLOCK_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          } else {
            parseStack.add(0, table[P_INDENTLESS_BLOCK_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_BLOCK_NODE]);
          }
        }
        return null;
      }
    };
    table[P_BLOCK_INDENTLESS_SEQUENCE_END] = new Production() {
      public Event produce () {
        return Event.SEQUENCE_END;
      }
    };
    table[P_FLOW_SEQUENCE_START] = new Production() {
      public Event produce () {
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        tokenizer.getNextToken();
        return new SequenceStartEvent(anchors.get(0), tags.get(0), implicit, true);
      }
    };
    table[P_FLOW_SEQUENCE_ENTRY] = new Production() {
      public Event produce () {
        if (tokenizer.peekNextTokenType() != FLOW_SEQUENCE_END) {
          if (tokenizer.peekNextTokenType() == KEY) {
            parseStack.add(0, table[P_FLOW_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_FLOW_ENTRY_MARKER]);
            parseStack.add(0, table[P_FLOW_INTERNAL_MAPPING_END]);
            parseStack.add(0, table[P_FLOW_INTERNAL_VALUE]);
            parseStack.add(0, table[P_FLOW_INTERNAL_CONTENT]);
            parseStack.add(0, table[P_FLOW_INTERNAL_MAPPING_START]);
          } else {
            parseStack.add(0, table[P_FLOW_SEQUENCE_ENTRY]);
            parseStack.add(0, table[P_FLOW_NODE]);
            parseStack.add(0, table[P_FLOW_ENTRY_MARKER]);
          }
        }
        return null;
      }
    };
    table[P_FLOW_SEQUENCE_END] = new Production() {
      public Event produce () {
        tokenizer.getNextToken();
        return Event.SEQUENCE_END;
      }
    };
    table[P_FLOW_MAPPING_START] = new Production() {
      public Event produce () {
        boolean implicit = tags.get(0) == null || tags.get(0).equals("!");
        tokenizer.getNextToken();
        return new MappingStartEvent(anchors.get(0), tags.get(0), implicit, true);
      }
    };
    table[P_FLOW_MAPPING_ENTRY] = new Production() {
      public Event produce () {
        if (tokenizer.peekNextTokenType() != FLOW_MAPPING_END) {
          if (tokenizer.peekNextTokenType() == KEY) {
            parseStack.add(0, table[P_FLOW_MAPPING_ENTRY]);
            parseStack.add(0, table[P_FLOW_ENTRY_MARKER]);
            parseStack.add(0, table[P_FLOW_MAPPING_INTERNAL_VALUE]);
            parseStack.add(0, table[P_FLOW_MAPPING_INTERNAL_CONTENT]);
          } else {
            parseStack.add(0, table[P_FLOW_MAPPING_ENTRY]);
            parseStack.add(0, table[P_FLOW_NODE]);
            parseStack.add(0, table[P_FLOW_ENTRY_MARKER]);
          }
        }
        return null;
      }
    };
    table[P_FLOW_MAPPING_END] = new Production() {
      public Event produce () {
        tokenizer.getNextToken();
        return Event.MAPPING_END;
      }
    };
    table[P_FLOW_INTERNAL_MAPPING_START] = new Production() {
      public Event produce () {
        tokenizer.getNextToken();
        return new MappingStartEvent(null, null, true, true);
      }
    };
    table[P_FLOW_INTERNAL_CONTENT] = new Production() {
      public Event produce () {
        TokenType type = tokenizer.peekNextTokenType();
        if (type == VALUE || type == FLOW_ENTRY || type == FLOW_SEQUENCE_END)
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        else
          parseStack.add(0, table[P_FLOW_NODE]);
        return null;
      }
    };
    table[P_FLOW_INTERNAL_VALUE] = new Production() {
      public Event produce () {
        if (tokenizer.peekNextTokenType() == VALUE) {
          tokenizer.getNextToken();
          if (tokenizer.peekNextTokenType() == FLOW_ENTRY || tokenizer.peekNextTokenType() == FLOW_SEQUENCE_END)
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          else
            parseStack.add(0, table[P_FLOW_NODE]);
        } else
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        return null;
      }
    };
    table[P_FLOW_INTERNAL_MAPPING_END] = new Production() {
      public Event produce () {
        return Event.MAPPING_END;
      }
    };
    table[P_FLOW_ENTRY_MARKER] = new Production() {
      public Event produce () {
        if (tokenizer.peekNextTokenType() == FLOW_ENTRY) tokenizer.getNextToken();
        return null;
      }
    };
    table[P_FLOW_NODE] = new Production() {
      public Event produce () {
        if (tokenizer.peekNextTokenType() == ALIAS)
          parseStack.add(0, table[P_ALIAS]);
        else {
          parseStack.add(0, table[P_PROPERTIES_END]);
          parseStack.add(0, table[P_FLOW_CONTENT]);
          parseStack.add(0, table[P_PROPERTIES]);
        }
        return null;
      }
    };
    table[P_FLOW_MAPPING_INTERNAL_CONTENT] = new Production() {
      public Event produce () {
        TokenType type = tokenizer.peekNextTokenType();
        if (type == VALUE || type == FLOW_ENTRY || type == FLOW_MAPPING_END)
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        else {
          tokenizer.getNextToken();
          parseStack.add(0, table[P_FLOW_NODE]);
        }
        return null;
      }
    };
    table[P_FLOW_MAPPING_INTERNAL_VALUE] = new Production() {
      public Event produce () {
        if (tokenizer.peekNextTokenType() == VALUE) {
          tokenizer.getNextToken();
          if (tokenizer.peekNextTokenType() == FLOW_ENTRY || tokenizer.peekNextTokenType() == FLOW_MAPPING_END)
            parseStack.add(0, table[P_EMPTY_SCALAR]);
          else
            parseStack.add(0, table[P_FLOW_NODE]);
        } else
          parseStack.add(0, table[P_EMPTY_SCALAR]);
        return null;
      }
    };
    table[P_ALIAS] = new Production() {
      public Event produce () {
        AliasToken token = (AliasToken)tokenizer.getNextToken();
        return new AliasEvent(token.getInstanceName());
      }
    };
    table[P_EMPTY_SCALAR] = new Production() {
      public Event produce () {
        return new ScalarEvent(null, null, new boolean[] {true, false}, "", (char)0);
View Full Code Here
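
The production table above is the core of yamlbeans' pull parser. From the caller's side, the usual entry point is YamlReader; a minimal sketch using the library's documented YamlReader(String) constructor and read() method (the sample document is made up):

import java.util.Map;
import com.esotericsoftware.yamlbeans.YamlException;
import com.esotericsoftware.yamlbeans.YamlReader;

public class YamlReaderDemo {
    public static void main(String[] args) throws YamlException {
        YamlReader reader = new YamlReader("name: token\ncount: 2\n");
        // With no target class, read() materializes the document as
        // Maps, Lists, and Strings.
        Map<?, ?> document = (Map<?, ?>) reader.read();
        System.out.println(document.get("name") + " / " + document.get("count"));
    }
}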