Fixed context analysis being unable to see context tokens

added toString methods in guard classes for debugging
This commit is contained in:
overflowerror 2021-12-08 20:17:15 +01:00
parent 372d0a4a01
commit d2c6a46071
13 changed files with 157 additions and 21 deletions

View file

@ -523,6 +523,27 @@ public class HoistingProcessorTest extends AbstractXtextTests {
assertTrue(guard.hasTerminal());
assertEquals("((" + getSyntaxForKeywordToken("a", 1) + " || (p0)) && (" + getSyntaxForKeywordToken("b", 1) + " || (p1)))", guard.render());
}
@Test
public void testAlternativesWithPrefixPathsAndNonEofContext_expectGuardBasedOnContext() throws Exception {
	// The guard for rule A must be based on the token that follows A in its
	// calling context ('s' from rule S), not on EOF.
	// @formatter:off
	String model =
		MODEL_PREAMBLE +
		"S: a=A 's' ;\n" +
		"A: {A} $$ p0 $$?=> 'a' \n" +
		" | {A} $$ p1 $$?=> 'a' 'b' ;";
	// @formatter:on
	XtextResource resource = getResourceFromString(model);
	Grammar grammar = ((Grammar) resource.getContents().get(0));
	hoistingProcessor.init(grammar);
	AbstractRule rule = getRule(grammar, "A");
	HoistingGuard guard = hoistingProcessor.findHoistingGuard(rule.getAlternatives());
	assertFalse(guard.isTrivial());
	assertTrue(guard.hasTerminal());
	assertEquals("((" + getSyntaxForKeywordToken("s", 2) + " || (p0)) && (" + getSyntaxForKeywordToken("b", 2) + " || (p1)))", guard.render());
}
@Test
public void testAlternativeWithDifferentEnumRule() throws Exception {

View file

@ -8,6 +8,7 @@
*******************************************************************************/
package org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.guards;
import java.util.Arrays;
import java.util.Collection;
import java.util.stream.Collectors;
@ -40,5 +41,18 @@ public class AlternativeTokenSequenceGuard implements TokenGuard {
return result;
}
@Override
public String toString() {
	// Debug representation: one alternative per line, each indented one tab
	// inside the parentheses.
	// Fix: the previous version appended "\n" to every entry AND joined with
	// "\n", which produced blank separator lines between alternatives.
	return "AlternativeTokenSequenceGuard (\n" +
			alternatives.stream()
				.map(Object::toString)
				.map(s -> Arrays.stream(s.split("\n"))
						.map(l -> "\t" + l)
						.collect(Collectors.joining("\n")))
				.collect(Collectors.joining("\n")) +
			"\n)\n";
}
}

View file

@ -8,6 +8,7 @@
*******************************************************************************/
package org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.guards;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.function.Predicate;
@ -69,4 +70,17 @@ public class AlternativesGuard implements HoistingGuard {
Collector.Characteristics.CONCURRENT
);
}
@Override
public String toString() {
	// Debug representation: one path guard per line, each indented one tab
	// inside the parentheses.
	// Fix: the previous version appended "\n" to every entry AND joined with
	// "\n", which produced blank separator lines between paths.
	return "AlternativesGuard (\n" +
			paths.stream()
				.map(Object::toString)
				.map(s -> Arrays.stream(s.split("\n"))
						.map(l -> "\t" + l)
						.collect(Collectors.joining("\n")))
				.collect(Collectors.joining("\n")) +
			"\n)\n";
}
}

View file

@ -8,6 +8,7 @@
*******************************************************************************/
package org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.guards;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
@ -85,5 +86,18 @@ public class GroupGuard implements HoistingGuard {
// Returns whether any element of this group contributes a terminal token.
public boolean hasTerminal() {
return hasTerminal;
}
@Override
public String toString() {
	// Debug representation: one element guard per line, each indented one tab
	// inside the parentheses.
	// Fix: the previous version appended "\n" to every entry AND joined with
	// "\n", which produced blank separator lines between element guards.
	return "GroupGuard (\n" +
			elementGuards.stream()
				.map(Object::toString)
				.map(s -> Arrays.stream(s.split("\n"))
						.map(l -> "\t" + l)
						.collect(Collectors.joining("\n")))
				.collect(Collectors.joining("\n")) +
			"\n)\n";
}
}

View file

@ -8,6 +8,7 @@
*******************************************************************************/
package org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.guards;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.stream.Collectors;
@ -61,5 +62,17 @@ public class MergedPathGuard implements HoistingGuard {
// only need to check first element since all paths should be identical
return pathGuards.get(0).hasTerminal();
}
@Override
public String toString() {
	// Debug representation: one merged path guard per line, each indented one
	// tab inside the parentheses.
	// Fix: the previous version appended "\n" to every entry AND joined with
	// "\n", which produced blank separator lines between path guards.
	return "MergedPathGuard (\n" +
			pathGuards.stream()
				.map(Object::toString)
				.map(s -> Arrays.stream(s.split("\n"))
						.map(l -> "\t" + l)
						.collect(Collectors.joining("\n")))
				.collect(Collectors.joining("\n")) +
			"\n)\n";
}
}

View file

@ -8,8 +8,10 @@
*******************************************************************************/
package org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.guards;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.stream.Collectors;
import org.eclipse.xtext.util.Pair;
@ -83,4 +85,16 @@ public class PathGuard implements HoistingGuard {
return result;
}
@Override
public String toString() {
	// Debug representation: both nested guards indented beneath their labels.
	// Fixes: label typo "hoistngGuard" -> "hoistingGuard" (the FIELD is spelled
	// "hoistngGuard" in this class and is left untouched here — renaming it is
	// out of scope for this block), and the nested content's first line is now
	// indented consistently with the following lines.
	return "PathGuard (\n" +
			"\ttokenGuard:\n" + indentTwice(tokenGuard.toString()) + "\n" +
			"\thoistingGuard:\n" + indentTwice(hoistngGuard.toString()) + "\n" +
			")\n";
}

// Indents every line of s by two tabs, for nested debug output.
private String indentTwice(String s) {
	return Arrays.stream(s.split("\n"))
			.map(l -> "\t\t" + l)
			.collect(Collectors.joining("\n"));
}
}

View file

@ -37,4 +37,11 @@ public class PredicateGuard implements HoistingGuard {
return false;
}
@Override
public String toString() {
	// Debug representation: the predicate's (trimmed) source code,
	// indented one tab inside the parentheses.
	String predicateSource = JavaCodeUtils.getSource(element.getCode()).trim();
	return "PredicateGuard (\n\t" + predicateSource + "\n)\n";
}
}

View file

@ -24,4 +24,11 @@ public class SingleTokenGuard implements TokenGuard {
// Renders this guard as the negated condition of its single token.
public String render() {
return token.negatedCondition();
}
@Override
public String toString() {
	// Debug representation: the single guarded token, indented one tab
	// inside the parentheses.
	StringBuilder builder = new StringBuilder("SingleTokenGuard (\n");
	builder.append('\t').append(token).append('\n');
	builder.append(")\n");
	return builder.toString();
}
}

View file

@ -8,6 +8,7 @@
*******************************************************************************/
package org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.guards;
import java.util.Arrays;
import java.util.Collection;
import java.util.stream.Collectors;
@ -40,4 +41,17 @@ public class TokenSequenceGuard implements TokenGuard {
return result;
}
@Override
public String toString() {
	// Debug representation: one token of the sequence per line, each indented
	// one tab inside the parentheses.
	// Fix: the previous version appended "\n" to every entry AND joined with
	// "\n", which produced blank separator lines between tokens.
	return "TokenSequenceGuard (\n" +
			sequence.stream()
				.map(Object::toString)
				.map(s -> Arrays.stream(s.split("\n"))
						.map(l -> "\t" + l)
						.collect(Collectors.joining("\n")))
				.collect(Collectors.joining("\n")) +
			"\n)\n";
}
}

View file

@ -56,28 +56,39 @@ public class TokenAnalysis {
this.grammar = grammar;
}
// Walks up the EMF containment hierarchy of element and returns the nearest
// enclosing AbstractElement, or null when there is none.
private AbstractElement getContainer(AbstractElement element) {
	for (EObject current = element.eContainer(); current != null; current = current.eContainer()) {
		if (current instanceof AbstractElement) {
			return (AbstractElement) current;
		}
	}
	return null;
}
private CompoundElement getCompoundContainer(AbstractElement element) {
if (element instanceof CompoundElement) {
// get container of compoundElement since getContainerOfType
// would return the same element
EObject tmp = element.eContainer();
while (!(tmp instanceof AbstractElement)) {
if (tmp == null) {
return null;
}
tmp = tmp.eContainer();
element = getContainer(element);
if (element == null) {
return null;
}
element = (AbstractElement) tmp;
}
return getContainerOfType(element, CompoundElement.class);
}
private List<AbstractElement> getNextElementsInContext(AbstractElement last) {
private List<AbstractElement> getNextElementsInContext(AbstractElement last) {
CompoundElement container = getCompoundContainer(last);
while (container instanceof Alternatives) {
while (container instanceof Alternatives ||
last.eContainer() instanceof Assignment
) {
// skip alternatives since they have to be covered separately
last = container;
last = getContainer(last);
if (last == null) {
return Arrays.asList((AbstractElement) null);
}
container = getCompoundContainer(last);
}
@ -89,7 +100,11 @@ public class TokenAnalysis {
} else if (container instanceof Group) {
List<AbstractElement> elements = container.getElements();
int index = elements.indexOf(last);
log.info(index);
if (index < 0) {
log.error("context analysis: element not part of compound");
log.info(last.eClass().getName());
log.info(abstractElementToString(container));
}
if (index < elements.size() - 1) {
return Arrays.asList(elements.get(index + 1));
} else {
@ -98,7 +113,7 @@ public class TokenAnalysis {
}
} else if (container == null) {
// end of rule
AbstractRule rule = containingRule(last);
AbstractRule rule = containingRule(last);
List<RuleCall> calls = findAllRuleCalls(grammar, rule);
if (calls.isEmpty()) {
@ -121,7 +136,7 @@ public class TokenAnalysis {
private TokenAnalysisPaths getTokenPathsContext(AbstractElement last, TokenAnalysisPaths prefix) {
List<AbstractElement> context = getNextElementsInContext(last);
TokenAnalysisPaths result = TokenAnalysisPaths.empty(prefix);
if (context.isEmpty()) {
@ -129,10 +144,10 @@ public class TokenAnalysis {
throw new TokenAnalysisAbortedException("context analysis failed: no context");
}
for (AbstractElement element : context) {
for (AbstractElement element : context) {
TokenAnalysisPaths path = new TokenAnalysisPaths(prefix);
path = getTokenPaths(element, path, false, false);
if (!path.isDone()) {
if (!path.isDone() && element != null) {
path = getTokenPathsContext(element, path);
}
if (path.isDone()) {
@ -265,8 +280,6 @@ public class TokenAnalysis {
do {
TokenAnalysisPaths tokenPaths = getTokenPathsTrivial(path, result);
result = result.merge(tokenPaths);
if (tokenPaths.isDone()) {
result = result.merge(tokenPaths);
break;

View file

@ -45,5 +45,10 @@ public class EofToken implements Token {
return false;
return true;
}
@Override
public String toString() {
	// Debug representation including the token position.
	return new StringBuilder("EofToken(")
			.append(position)
			.append(")\n")
			.toString();
}
}

View file

@ -29,7 +29,7 @@ public class KeywordToken implements Token {
@Override
public String toString() {
return "keyword " + keyword.getValue();
return "keywordToken(" + keyword.getValue() + ", " + position + ")\n";
}
@Override

View file

@ -29,7 +29,7 @@ public class TerminalRuleToken implements Token {
@Override
public String toString() {
return "terminal " + rule.getName();
return "TerminalToken(" + rule.getName() + ", " + position + ")\n";
}
@Override