mirror of https://github.com/sigmasternchen/xtext-core
synced 2025-03-15 08:18:55 +00:00
fixed context analysis not being able to see context tokens
added toString methods in guard classes for debugging
This commit is contained in:
parent 372d0a4a01
commit d2c6a46071
13 changed files with 157 additions and 21 deletions
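The toString() methods introduced in this commit all follow one pattern: render each nested guard, split its (possibly multi-line) string into lines, prefix every line with a tab, and join the results, so nested guards print as an indented tree. The standalone sketch below only illustrates that indentation scheme; the class and field names are made up for the example and are not part of the commit:

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

// Illustrative only: mimics the indentation scheme used by the new toString() methods.
public class NestedToStringSketch {
    private final String name;
    private final List<NestedToStringSketch> children;

    public NestedToStringSketch(String name, List<NestedToStringSketch> children) {
        this.name = name;
        this.children = children;
    }

    @Override
    public String toString() {
        return name + " (\n" +
                children.stream()
                    .map(Object::toString)
                    // indent every line of the child's representation by one tab
                    .map(s -> Arrays.stream(s.split("\n"))
                        .map(l -> "\t" + l)
                        .collect(Collectors.joining("\n"))
                    ).map(s -> s + "\n")
                    .collect(Collectors.joining("\n")) +
                ")\n";
    }

    public static void main(String[] args) {
        NestedToStringSketch leaf = new NestedToStringSketch("Leaf", List.of());
        NestedToStringSketch root = new NestedToStringSketch("Root", List.of(leaf));
        System.out.println(root);
    }
}

Printing root in this sketch yields "Root (" followed by a tab-indented "Leaf (…)" block, which is the shape the new guard toString() output takes.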
HoistingProcessorTest.java
@@ -524,6 +524,27 @@ public class HoistingProcessorTest extends AbstractXtextTests {
 		assertEquals("((" + getSyntaxForKeywordToken("a", 1) + " || (p0)) && (" + getSyntaxForKeywordToken("b", 1) + " || (p1)))", guard.render());
 	}
 	
+	@Test
+	public void testAlternativesWithPrefixPathsAndNonEofContext_expectGuardBasedOnContext() throws Exception {
+		// @formatter:off
+		String model =
+			MODEL_PREAMBLE +
+			"S: a=A 's' ;\n" +
+			"A: {A} $$ p0 $$?=> 'a' \n" +
+			" | {A} $$ p1 $$?=> 'a' 'b' ;";
+		// @formatter:off
+		XtextResource resource = getResourceFromString(model);
+		Grammar grammar = ((Grammar) resource.getContents().get(0));
+		hoistingProcessor.init(grammar);
+		AbstractRule rule = getRule(grammar, "A");
+		
+		HoistingGuard guard = hoistingProcessor.findHoistingGuard(rule.getAlternatives());
+		assertFalse(guard.isTrivial());
+		assertTrue(guard.hasTerminal());
+		System.out.println(guard.toString());
+		assertEquals("((" + getSyntaxForKeywordToken("s", 2) + " || (p0)) && (" + getSyntaxForKeywordToken("b", 2) + " || (p1)))", guard.render());
+	}
+	
 	@Test
 	public void testAlternativeWithDifferentEnumRule() throws Exception {
 		// @formatter:off
AlternativeTokenSequenceGuard.java
@@ -8,6 +8,7 @@
  *******************************************************************************/
 package org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.guards;
 
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.stream.Collectors;
 
@@ -41,4 +42,17 @@ public class AlternativeTokenSequenceGuard implements TokenGuard {
 		return result;
 	}
 	
+	@Override
+	public String toString() {
+		return "AlternativeTokenSequenceGuard (\n" +
+				alternatives.stream()
+					.map(Object::toString)
+					.map(s -> Arrays.stream(s.split("\n"))
+						.map(l -> "\t" + l)
+						.collect(Collectors.joining("\n"))
+					).map(s -> s + "\n")
+					.collect(Collectors.joining("\n")) +
+				")\n";
+	}
+	
 }
AlternativesGuard.java
@@ -8,6 +8,7 @@
  *******************************************************************************/
 package org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.guards;
 
+import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.function.Predicate;
@@ -69,4 +70,17 @@ public class AlternativesGuard implements HoistingGuard {
 			Collector.Characteristics.CONCURRENT
 		);
 	}
+	
+	@Override
+	public String toString() {
+		return "AlternativesGuard (\n" +
+				paths.stream()
+					.map(Object::toString)
+					.map(s -> Arrays.stream(s.split("\n"))
+						.map(l -> "\t" + l)
+						.collect(Collectors.joining("\n"))
+					).map(s -> s + "\n")
+					.collect(Collectors.joining("\n")) +
+				")\n";
+	}
 }
GroupGuard.java
@@ -8,6 +8,7 @@
  *******************************************************************************/
 package org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.guards;
 
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.LinkedList;
 import java.util.List;
@@ -86,4 +87,17 @@ public class GroupGuard implements HoistingGuard {
 		return hasTerminal;
 	}
 	
+	@Override
+	public String toString() {
+		return "GroupGuard (\n" +
+				elementGuards.stream()
+					.map(Object::toString)
+					.map(s -> Arrays.stream(s.split("\n"))
+						.map(l -> "\t" + l)
+						.collect(Collectors.joining("\n"))
+					).map(s -> s + "\n")
+					.collect(Collectors.joining("\n")) +
+				")\n";
+	}
+	
 }
MergedPathGuard.java
@@ -8,6 +8,7 @@
  *******************************************************************************/
 package org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.guards;
 
+import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.stream.Collectors;
@@ -62,4 +63,16 @@ public class MergedPathGuard implements HoistingGuard {
 		return pathGuards.get(0).hasTerminal();
 	}
 	
+	@Override
+	public String toString() {
+		return "MergedPathGuard (\n" +
+				pathGuards.stream()
+					.map(Object::toString)
+					.map(s -> Arrays.stream(s.split("\n"))
+						.map(l -> "\t" + l)
+						.collect(Collectors.joining("\n"))
+					).map(s -> s + "\n")
+					.collect(Collectors.joining("\n")) +
+				")\n";
+	}
 }
PathGuard.java
@@ -8,8 +8,10 @@
  *******************************************************************************/
 package org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.guards;
 
+import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.stream.Collectors;
 
 import org.eclipse.xtext.util.Pair;
 
@@ -83,4 +85,16 @@ public class PathGuard implements HoistingGuard {
 		
 		return result;
 	}
+	
+	@Override
+	public String toString() {
+		return "PathGuard (\n" +
+				"\ttokenGuard:\n\t" + Arrays.stream(tokenGuard.toString().split("\n"))
+					.map(l -> "\t" + l)
+					.collect(Collectors.joining("\n")) + "\n" +
+				"\thoistngGuard:\n\t" + Arrays.stream(hoistngGuard.toString().split("\n"))
+					.map(l -> "\t" + l)
+					.collect(Collectors.joining("\n")) + "\n" +
+				")\n";
+	}
 }
PredicateGuard.java
@@ -37,4 +37,11 @@ public class PredicateGuard implements HoistingGuard {
 		return false;
 	}
 	
+	@Override
+	public String toString() {
+		return "PredicateGuard (\n" +
+				"\t" + JavaCodeUtils.getSource(element.getCode()).trim() + "\n" +
+				")\n";
+	}
+	
 }
SingleTokenGuard.java
@@ -24,4 +24,11 @@ public class SingleTokenGuard implements TokenGuard {
 	public String render() {
 		return token.negatedCondition();
 	}
+	
+	@Override
+	public String toString() {
+		return "SingleTokenGuard (\n" +
+				"\t" + token + "\n" +
+				")\n";
+	}
 }
TokenSequenceGuard.java
@@ -8,6 +8,7 @@
  *******************************************************************************/
 package org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.guards;
 
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.stream.Collectors;
 
@@ -40,4 +41,17 @@ public class TokenSequenceGuard implements TokenGuard {
 		
 		return result;
 	}
+	
+	@Override
+	public String toString() {
+		return "TokenSequenceGuard (\n" +
+				sequence.stream()
+					.map(Object::toString)
+					.map(s -> Arrays.stream(s.split("\n"))
+						.map(l -> "\t" + l)
+						.collect(Collectors.joining("\n"))
+					).map(s -> s + "\n")
+					.collect(Collectors.joining("\n")) +
+				")\n";
+	}
 }
TokenAnalysis.java
@@ -56,28 +56,39 @@ public class TokenAnalysis {
 		this.grammar = grammar;
 	}
 	
+	private AbstractElement getContainer(AbstractElement element) {
+		EObject tmp = element.eContainer();
+		while (!(tmp instanceof AbstractElement)) {
+			if (tmp == null) {
+				return null;
+			}
+			tmp = tmp.eContainer();
+		}
+		return (AbstractElement) tmp;
+	}
+	
 	private CompoundElement getCompoundContainer(AbstractElement element) {
 		if (element instanceof CompoundElement) {
 			// get container of compoundElement since getContainerOfType
 			// would return the same element
-			EObject tmp = element.eContainer();
-			while (!(tmp instanceof AbstractElement)) {
-				if (tmp == null) {
-					return null;
-				}
-				tmp = tmp.eContainer();
+			element = getContainer(element);
+			if (element == null) {
+				return null;
 			}
-			element = (AbstractElement) tmp;
 		}
 		return getContainerOfType(element, CompoundElement.class);
 	}
 	
 	private List<AbstractElement> getNextElementsInContext(AbstractElement last) {
-		
 		CompoundElement container = getCompoundContainer(last);
-		while (container instanceof Alternatives) {
+		while (container instanceof Alternatives ||
+				last.eContainer() instanceof Assignment
+		) {
 			// skip alternatives since they have to be covered separately
-			last = container;
+			last = getContainer(last);
+			if (last == null) {
+				return Arrays.asList((AbstractElement) null);
+			}
 			container = getCompoundContainer(last);
 		}
 		
@@ -89,7 +100,11 @@ public class TokenAnalysis {
 		} else if (container instanceof Group) {
 			List<AbstractElement> elements = container.getElements();
 			int index = elements.indexOf(last);
-			log.info(index);
+			if (index < 0) {
+				log.error("context analysis: element not part of compound");
+				log.info(last.eClass().getName());
+				log.info(abstractElementToString(container));
+			}
 			if (index < elements.size() - 1) {
 				return Arrays.asList(elements.get(index + 1));
 			} else {
@@ -132,7 +147,7 @@ public class TokenAnalysis {
 		for (AbstractElement element : context) {
 			TokenAnalysisPaths path = new TokenAnalysisPaths(prefix);
 			path = getTokenPaths(element, path, false, false);
-			if (!path.isDone()) {
+			if (!path.isDone() && element != null) {
 				path = getTokenPathsContext(element, path);
 			}
 			if (path.isDone()) {
@@ -265,8 +280,6 @@ public class TokenAnalysis {
 		do {
 			TokenAnalysisPaths tokenPaths = getTokenPathsTrivial(path, result);
 			
-			result = result.merge(tokenPaths);
-			
 			if (tokenPaths.isDone()) {
 				result = result.merge(tokenPaths);
 				break;
EofToken.java
@@ -46,4 +46,9 @@ public class EofToken implements Token {
 		return true;
 	}
 	
+	@Override
+	public String toString() {
+		return "EofToken(" + position + ")\n";
+	}
+	
 }
KeywordToken.java
@@ -29,7 +29,7 @@ public class KeywordToken implements Token {
 	
 	@Override
 	public String toString() {
-		return "keyword " + keyword.getValue();
+		return "keywordToken(" + keyword.getValue() + ", " + position + ")\n";
 	}
 	
 	@Override
TerminalRuleToken.java
@@ -29,7 +29,7 @@ public class TerminalRuleToken implements Token {
 	
 	@Override
 	public String toString() {
-		return "terminal " + rule.getName();
+		return "TerminalToken(" + rule.getName() + ", " + position + ")\n";
 	}
 	
 	@Override