public ParseResult parse(@NotNull ParsePathNode parentNode, boolean optional, int depth, ParserContext context) throws ParseException {
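    // matches the child element types of this sequence against the token stream, one sibling at a time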
    ParserBuilder builder = context.getBuilder();
    logBegin(builder, optional, depth);
    SequenceElementType elementType = getElementType();
    ParsePathNode node = createParseNode(parentNode, builder.getCurrentOffset());
    PsiBuilder.Marker marker = builder.mark(node);
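
    // progress counters: matched child elements and consumed tokens of this sequence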
    int matches = 0;
    int matchedTokens = 0;

    TokenType tokenType = builder.getTokenType();
    boolean isDummyToken = isDummyToken(builder.getTokenText());
    boolean isSuppressibleReservedWord =
            !elementType.is(ElementTypeAttribute.STATEMENT) &&
            isSuppressibleReservedWord(tokenType, node);
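
    // only enter the sequence if the current token can possibly start it
    // (dummy tokens and suppressible reserved words are tolerated as well)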
    if (tokenType != null && !tokenType.isChameleon() && (isDummyToken || isSuppressibleReservedWord || elementType.getLookupCache().canStartWithToken(tokenType))) {
        ElementType[] elementTypes = elementType.getElementTypes();
        while (node.getCurrentSiblingIndex() < elementTypes.length) {
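            // position of the child element currently being matched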
            int index = node.getCurrentSiblingIndex();

            // end of document or chameleon token reached
            if (tokenType == null || tokenType.isChameleon()) {
                ParseResultType resultType =
                        elementType.isOptional(index) && (elementType.isLast(index) || elementType.isOptionalFromIndex(index)) ? ParseResultType.FULL_MATCH :
                        !elementType.isFirst(index) && !elementType.isOptionalFromIndex(index) && !elementType.isExitIndex(index) ? ParseResultType.PARTIAL_MATCH : ParseResultType.NO_MATCH;
                return stepOut(marker, depth, resultType, matchedTokens, node, context);
            }
            ParseResult result = ParseResult.createNoMatchResult();

            // current token can still be part of the iterated element.
            //if (elementTypes[i].containsToken(tokenType)) {
            if (isDummyToken || elementTypes[index].getLookupCache().canStartWithToken(tokenType) || isSuppressibleReservedWord(tokenType, node)) {
                //node = node.createVariant(builder.getCurrentOffset(), i);
                result = elementTypes[index].getParser().parse(node, elementType.isOptional(index), depth + 1, context);
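                // child element matched: account for its tokens and refresh the lookahead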
                if (result.isMatch()) {
                    matchedTokens = matchedTokens + result.getMatchedTokens();
                    tokenType = builder.getTokenType();
                    isDummyToken = isDummyToken(builder.getTokenText());
                    matches++;
                }
            }

            // not matched and not optional
            if (result.isNoMatch() && !elementType.isOptional(index)) {
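                // weak match: very little matched so far (at most one element, at most two tokens) past the first elements, subject to ignoreFirstMatch()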
                boolean isWeakMatch = matches < 2 && matchedTokens < 3 && index > 1 && ignoreFirstMatch();

                if (elementType.isFirst(index) || elementType.isExitIndex(index) || isWeakMatch || matches == 0) {
                    //if (isFirst(i) || isExitIndex(i)) {
                    return stepOut(marker, depth, ParseResultType.NO_MATCH, matchedTokens, node, context);
                }
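
                // error recovery: advance the lexer to the next known landmark
                // (presumably a token that can resume this sequence or a parent element)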
                index = advanceLexerToNextLandmark(node, context);

                if (index <= 0) {
                    // no landmarks found, or the landmark belongs to a parent element
                    return stepOut(marker, depth, ParseResultType.PARTIAL_MATCH, matchedTokens, node, context);
                } else {
                    // local landmark found
                    tokenType = builder.getTokenType();
                    isDummyToken = isDummyToken(builder.getTokenText());
                    node.setCurrentSiblingIndex(index);
                    continue;
                }
            }

            // last element of the sequence reached
            if (elementType.isLast(index)) {
                // matches == 0 can only reach this point if all sequence elements are optional
                ParseResultType resultType = matches == 0 ? ParseResultType.NO_MATCH : ParseResultType.FULL_MATCH;
                return stepOut(marker, depth, resultType, matchedTokens, node, context);
            }
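
            // move on to the next element of the sequence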
            node.incrementIndex(builder.getCurrentOffset());
        }
    }
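
    // the current token cannot start this sequence (or all elements were exhausted): no match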
    return stepOut(marker, depth, ParseResultType.NO_MATCH, matchedTokens, node, context);
}