fixed problem with tail-recursion in context analysis

recursion (tail recursion only) causes problems: the context of the rule
call leads back into the same rule, so getNextElementsInContext() recurses
endlessly

example:

S: {S} $$ p0 $$?=> 's'
 |     $$ p1 $$?=> 's' s=S
;

solution: added a set of visited rule calls to the parameter list; a rule
call is skipped if it was already handled earlier in the recursion
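
illustrative sketch (not the actual Xtext code; Node is a hypothetical
stand-in for AbstractElement/RuleCall, nextInContext() for
getNextElementsInContext()):

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

class ContextWalker {
    // hypothetical model of a rule-call graph
    static class Node {
        final List<Node> callers = new ArrayList<>();
    }

    // entry point: start with an empty visited set, as in the fix
    static List<Node> nextInContext(Node last) {
        return nextInContext(last, new HashSet<>());
    }

    static List<Node> nextInContext(Node last, Set<Node> visited) {
        List<Node> result = new ArrayList<>();
        for (Node caller : last.callers) {
            if (visited.contains(caller)) {
                continue; // already expanded on this path -> no endless recursion
            }
            result.add(caller);
            // copy the set so sibling branches are not affected
            Set<Node> nextVisited = new HashSet<>(visited);
            nextVisited.add(caller);
            result.addAll(nextInContext(caller, nextVisited));
        }
        return result;
    }

    public static void main(String[] args) {
        Node s = new Node();
        s.callers.add(s); // tail-recursive rule: S is its own caller
        System.out.println(nextInContext(s).size()); // terminates and prints 1
    }
}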

added test cases
overflowerror 2022-01-18 17:15:45 +01:00
parent 9c6b8673d0
commit 9dfc0b4bfc
3 changed files with 61 additions and 5 deletions


@@ -8,6 +8,8 @@
*******************************************************************************/
package org.eclipse.xtext.xtext.generator.hoisting;
import javax.management.RuntimeErrorException;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.xml.type.XMLTypePackage;
import org.eclipse.xtext.AbstractRule;
@@ -1079,4 +1081,47 @@ public class HoistingProcessorTest extends AbstractXtextTests {
assertTrue(guard.hasTerminal());
assertEquals("(((" + getSyntaxForKeywordToken("a", 2) + " && " + getSyntaxForEofToken(2) + ") || (p0)) && (" + getSyntaxForKeywordToken("b", 2) + " || (p1)))", guard.render());
}
@Test
public void testRecursiveContextWithIntermediate_expectCorrectResult() throws Exception {
// @formatter:off
String model =
MODEL_PREAMBLE +
"tokenLimit 4\n" +
"hoistingDebug\n" +
"S: a=A ;\n" +
"A: $$ p0 $$?=> 'a' \n" +
" | $$ p1 $$?=> 'a' s=S ;\n";
// @formatter:on
XtextResource resource = getResourceFromString(model);
Grammar grammar = ((Grammar) resource.getContents().get(0));
hoistingProcessor.init(grammar);
AbstractRule rule = getRule(grammar, "A");
HoistingGuard guard = hoistingProcessor.findHoistingGuard(rule.getAlternatives());
assertFalse(guard.isTrivial());
assertTrue(guard.hasTerminal());
assertEquals("((" + getSyntaxForEofToken(2) + " || (p0)) && (" + getSyntaxForKeywordToken("a", 2) + " || (p1)))", guard.render());
}
@Test
public void testRecursiveContextNoIntermediate_expectCorrectResult() throws Exception {
// @formatter:off
String model =
MODEL_PREAMBLE +
"tokenLimit 4\n" +
"hoistingDebug\n" +
"S: $$ p0 $$?=> 'a' \n" +
" | $$ p1 $$?=> 'a' s=S ;\n";
// @formatter:on
XtextResource resource = getResourceFromString(model);
Grammar grammar = ((Grammar) resource.getContents().get(0));
hoistingProcessor.init(grammar);
AbstractRule rule = getRule(grammar, "S");
HoistingGuard guard = hoistingProcessor.findHoistingGuard(rule.getAlternatives());
assertFalse(guard.isTrivial());
assertTrue(guard.hasTerminal());
assertEquals("((" + getSyntaxForEofToken(2) + " || (p0)) && (" + getSyntaxForKeywordToken("a", 2) + " || (p1)))", guard.render());
}
}


@@ -329,7 +329,7 @@ public class HoistingProcessor {
}
private HoistingGuard findGuardForUnorderedGroup(UnorderedGroup element, AbstractRule currentRule) {
// Unordered group (A & B) is the same as (A | B)+ or (A | B)* (is A and B are optional)
// Unordered group (A & B) is the same as (A | B)+ or (A | B)* (if A and B are optional)
// but the cardinality doesn't matter for hoisting
// if A and B are optional the guard for the alternatives need to check the context
// if not the alternatives are actual alternatives


@@ -16,6 +16,7 @@ import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
@@ -61,6 +62,10 @@ public class TokenAnalysis {
}
private List<AbstractElement> getNextElementsInContext(AbstractElement last) {
return getNextElementsInContext(last, new HashSet<>());
}
private List<AbstractElement> getNextElementsInContext(AbstractElement last, Set<AbstractElement> visited) {
List<AbstractElement> result = new ArrayList<>();
AbstractElement _last = last;
@@ -95,7 +100,9 @@
if (compoundContainer == null) {
// no container element; this is last element in a rule definition
AbstractRule rule = containingRule(last);
List<RuleCall> calls = findAllRuleCalls(grammar, rule);
List<RuleCall> calls = findAllRuleCalls(grammar, rule).stream()
.filter(Predicate.not(visited::contains))
.collect(Collectors.toList());
if (calls.isEmpty()) {
// has to be start rule
@@ -104,7 +111,9 @@
}
for (RuleCall call : calls) {
result.addAll(getNextElementsInContext(call));
Set<AbstractElement> _visited = new HashSet<>(visited);
_visited.add(call);
result.addAll(getNextElementsInContext(call, _visited));
}
} else if (compoundContainer instanceof Group) {
List<AbstractElement> elements = compoundContainer.getElements();
@@ -139,13 +148,13 @@
result.add(compoundContainer);
}
result.addAll(getNextElementsInContext(compoundContainer));
result.addAll(getNextElementsInContext(compoundContainer, visited));
}
} else if (compoundContainer instanceof UnorderedGroup) {
result.addAll(compoundContainer.getElements().stream()
.collect(Collectors.toList())
);
result.addAll(getNextElementsInContext(compoundContainer));
result.addAll(getNextElementsInContext(compoundContainer, visited));
} else {
throw new IllegalArgumentException("unknown compound element: " + container.eClass().getName());
}
@@ -188,6 +197,7 @@
if (path.isDone()) {
result = result.merge(path);
} else {
log.info("context analysis failed");
throw new TokenAnalysisAbortedException("context analysis failed");
}
}
@@ -495,6 +505,7 @@
return true;
}
} catch (TokenAnalysisAbortedException e) {
log.info("token combinations: " + e.getMessage());
// tokens exhausted; abort current prefix
// set limit for calling functions so this index is not checked again
limit.set(i);