mirror of https://github.com/sigmasternchen/xtext-core
synced 2025-03-15 08:18:55 +00:00
fixed path collapse
collapsed paths lose the containing token guard => fixed the problem and added a positional condition in the token sequence guard constructor so that positions are not checked twice (order matters: pass the local token guard first in case it is sufficient); removed testNestedAlternativesWithSingleTokenDifference because it is wrong: 'a' 'b' 'd' with p1 satisfies the semantic predicates but not the generated guard condition
This commit is contained in:
parent a83e3627cc
commit 428dfb93d0

11 changed files with 139 additions and 30 deletions
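For readers skimming the diff below: the heart of the fix is that the TokenSequenceGuard constructor now flattens nested sequence guards and drops any guard whose token positions were already covered earlier in the sequence, so no position is checked twice. The following is a simplified, hypothetical sketch of that deduplication idea using plain position sets instead of the real guard classes (the actual code in this commit operates on TokenGuard instances and collects into a LinkedHashSet):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Simplified, hypothetical sketch of the position-based deduplication the
// TokenSequenceGuard constructor performs in this commit; not the actual
// xtext-core classes.
public class PositionDedupSketch {

	// Keep only guards that check at least one position not already covered
	// by an earlier guard in the sequence (order matters: the local token
	// guard comes first, so it wins whenever it is sufficient on its own).
	static List<Set<Integer>> deduplicate(List<Set<Integer>> guardPositions) {
		Set<Integer> checkedPositions = new HashSet<>();
		List<Set<Integer>> result = new ArrayList<>();
		for (Set<Integer> positions : guardPositions) {
			if (!checkedPositions.containsAll(positions)) {
				checkedPositions.addAll(positions);
				result.add(positions);
			}
		}
		return result;
	}

	public static void main(String[] args) {
		List<Set<Integer>> guards = Arrays.asList(
			new HashSet<>(Arrays.asList(1)),     // local guard: checks position 1
			new HashSet<>(Arrays.asList(1)),     // dropped: position 1 already checked
			new HashSet<>(Arrays.asList(1, 2))); // kept: position 2 is new
		System.out.println(deduplicate(guards)); // prints [[1], [1, 2]]
	}
}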
@@ -722,30 +722,6 @@ public class HoistingProcessorTest extends AbstractXtextTests {
 		assertEquals("(" + getSyntaxForKeywordToken("a", 1) + " || (p0))", guard.render());
 	}
 	
-	@Test
-	public void testNestedAlternativesWithSingleTokenDifference() throws Exception {
-		// @formatter:off
-		String model =
-			MODEL_PREAMBLE +
-			"S: $$ p0 $$?=> a=A \n" +
-			" | $$ p1 $$?=> b=B ;\n" +
-			"A: {A} $$ p2 $$?=> 'a' 'b' 'c' \n" +
-			" | {A} $$ p3 $$?=> 'a' 'c' 'c' ;\n" +
-			"B: {B} 'a' 'c' 'd' \n" +
-			" | {B} 'a' 'b' 'd' ;\n";
-		
-		// @formatter:off
-		XtextResource resource = getResourceFromString(model);
-		Grammar grammar = ((Grammar) resource.getContents().get(0));
-		hoistingProcessor.init(grammar);
-		AbstractRule rule = getRule(grammar, "S");
-		
-		HoistingGuard guard = hoistingProcessor.findHoistingGuard(rule.getAlternatives());
-		assertFalse(guard.isTrivial());
-		assertTrue(guard.hasTerminal());
-		assertEquals("((" + getSyntaxForKeywordToken("b", 2) + " || ((p0) && (p2))) && (" + getSyntaxForKeywordToken("c", 2) + " || ((p0) && (p3))) && (" + getSyntaxForKeywordToken("d", 3) + " || (p1)))", guard.render());
-	}
-	
 	@Test
 	public void testNestedAlternativesWithNoSingleTokenDifference() throws Exception {
 		// @formatter:off
@@ -772,7 +748,7 @@ public class HoistingProcessorTest extends AbstractXtextTests {
 	}
 	
 	@Test
-	public void testNestedAlternativesWithCollapsablePaths() throws Exception {
+	public void testNestedAlternativesWithCollapsablePathsWithSingleDifferencePosition() throws Exception {
 		// @formatter:off
 		String model =
 			MODEL_PREAMBLE +
@@ -794,6 +770,52 @@ public class HoistingProcessorTest extends AbstractXtextTests {
 		assertEquals("((" + getSyntaxForKeywordToken("b", 3) + " || ((p0) && (p2))) && (" + getSyntaxForKeywordToken("c", 3) + " || ((p0) && (p3))) && (" + getSyntaxForKeywordToken("d", 3) + " || (p1)))", guard.render());
 	}
 	
+	@Test
+	public void testNestedAlternativesWithCollapsablePathsWithMultipleDifferencePosition() throws Exception {
+		// @formatter:off
+		String model =
+			MODEL_PREAMBLE +
+			"S: $$ p0 $$?=> a=A \n" +
+			" | $$ p1 $$?=> 'a' 'b' ;\n" +
+			"A: {A} $$ p2 $$?=> 'b' 'a' \n" +
+			" | {A} $$ p3 $$?=> 'b' 'b' ;";
+		
+		// @formatter:off
+		XtextResource resource = getResourceFromString(model);
+		Grammar grammar = ((Grammar) resource.getContents().get(0));
+		hoistingProcessor.init(grammar);
+		AbstractRule rule = getRule(grammar, "S");
+		
+		HoistingGuard guard = hoistingProcessor.findHoistingGuard(rule.getAlternatives());
+		assertFalse(guard.isTrivial());
+		assertTrue(guard.hasTerminal());
+		
+		assertEquals("((" + getSyntaxForKeywordToken("a", 2) + " || " + getSyntaxForKeywordToken("b", 1) + " || ((p0) && (p2))) && (" + getSyntaxForKeywordToken("b", 2) + " || " + getSyntaxForKeywordToken("b", 1) + " || ((p0) && (p3))) && (" + getSyntaxForKeywordToken("a", 1) + " || (p1)))", guard.render());
+	}
+	
+	@Test
+	public void testRecursiveRuleCallingAlternative_expectCorrectGuard() throws Exception {
+		// @formatter:off
+		String model =
+			MODEL_PREAMBLE +
+			"S: $$ p0 $$?=> a=A \n" +
+			" | $$ p1 $$?=> 'a' 'b' 'd' ;\n" +
+			"A: $$ p2 $$?=> 'a' a=A \n" +
+			" | $$ p3 $$?=> 'b' 'c' ;\n";
+		
+		// @formatter:off
+		XtextResource resource = getResourceFromString(model);
+		Grammar grammar = ((Grammar) resource.getContents().get(0));
+		hoistingProcessor.init(grammar);
+		AbstractRule rule = getRule(grammar, "S");
+		
+		HoistingGuard guard = hoistingProcessor.findHoistingGuard(rule.getAlternatives());
+		assertFalse(guard.isTrivial());
+		assertTrue(guard.hasTerminal());
+		
+		assertEquals("((" + getSyntaxForKeywordToken("a", 1) + " || (" + getSyntaxForKeywordToken("c", 3) + " && " + getSyntaxForEofToken(3) + ") || ((p0) && (p2))) && (" + getSyntaxForKeywordToken("c", 1) + " || ((p0) && (p3))) && (" + getSyntaxForKeywordToken("d", 3) + " || (p1)))", guard.render());
+	}
+	
 	@Test
 	public void testAlternativeEmptyAndNonEmptyPaths_expectEofCheck() throws Exception {
 		// @formatter:off
@@ -273,7 +273,7 @@ public class HoistingProcessor {
 			// -> size = 1
 			if (size > 1) {
 				try {
-					return StreamUtils.zip(
+					AlternativesGuard result = StreamUtils.zip(
 						analysis.findMinimalPathDifference(paths).stream()
 							.map(a -> a.stream()
 								.map(s -> s.stream()
@@ -281,15 +281,21 @@ public class HoistingProcessor {
 									.collect(Collectors.toList())
 								)
 								.map(TokenSequenceGuard::new)
+								.peek(g -> log.info(g))
 								.map(TokenGuard::reduce)
+								.peek(g -> log.info(g))
 								.collect(Collectors.toList())
 							)
 							.map(AlternativeTokenSequenceGuard::new)
-							.map(TokenGuard::reduce),
+							.peek(g -> log.info(g))
+							.map(TokenGuard::reduce)
+							.peek(g -> log.info(g)),
 						guards.stream(),
 						(TokenGuard tokenGuard, MergedPathGuard pathGuard) -> Tuples.pair(tokenGuard, pathGuard)
 					).map(p -> new PathGuard(p.getFirst(), p.getSecond()))
 					.collect(AlternativesGuard.collector());
+					log.info(result);
+					return result;
 				} catch(NestedPrefixAlternativesException e) {
 					// nested prefix alternatives
 					// -> flatten paths to alternative and try again
@@ -308,7 +314,7 @@ public class HoistingProcessor {
 					log.info(flattened.getElements().size());
 					// TODO: value configurable?
 					if (flattened.getElements().size() > 100) {
-						throw new NestedPrefixAlternativesException("nested prefix alternatives cant be analysed because of too many paths");
+						throw new NestedPrefixAlternativesException("nested prefix alternatives can't be analysed because of too many paths");
 					}
 					
 					//throw new RuntimeException();
@@ -10,6 +10,8 @@ package org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.guards;
 
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
 import java.util.stream.Collectors;
 
 /**
@@ -56,4 +58,19 @@ public class AlternativeTokenSequenceGuard implements TokenGuard {
 			")\n";
 	}
 	
+	
+	@Override
+	public Set<Integer> getPositions() {
+		if (alternatives.isEmpty()) {
+			return new HashSet<>();
+		}
+		
+		Set<Set<Integer>> positions = alternatives.stream()
+			.map(TokenGuard::getPositions)
+			.collect(Collectors.toSet());
+		return positions.stream().findAny().get().stream()
+			.filter(p -> positions.stream()
+				.allMatch(s -> s.contains(p)))
+			.collect(Collectors.toSet());
+	}
 }
@@ -92,7 +92,7 @@ public class PathGuard implements HoistingGuard {
 				// construct new path guard and add to result
 				GroupGuard groupGuard = new GroupGuard(destructedPaths.getFirst());
 				groupGuard.add(p.hoistngGuard);
-				result.add(new PathGuard(p.tokenGuard, groupGuard));
+				result.add(new PathGuard(new TokenSequenceGuard(p.tokenGuard, path.tokenGuard), groupGuard));
 			});
 		}
 	} else {
@@ -8,6 +8,10 @@
  *******************************************************************************/
 package org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.guards;
 
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
 import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.token.Token;
 
 /**
@@ -36,4 +40,9 @@ public class SingleTokenGuard implements TokenGuard {
 			"\t" + token + "\n" +
 			")\n";
 	}
+	
+	@Override
+	public Set<Integer> getPositions() {
+		return new HashSet<>(Arrays.asList(token.getPosition()));
+	}
 }
@@ -8,6 +8,8 @@
  *******************************************************************************/
 package org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.guards;
 
+import java.util.Set;
+
 /**
  * @author overflow - Initial contribution and API
  */
@@ -17,5 +19,7 @@ public interface TokenGuard extends Guard {
 		return false;
 	}
 	
+	Set<Integer> getPositions();
+	
 	TokenGuard reduce();
 }
@@ -10,7 +10,12 @@ package org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.guards;
 
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.HashSet;
+import java.util.Set;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.utils.StreamUtils;
 
 /**
  * @author overflow - Initial contribution and API
@@ -19,7 +24,30 @@ public class TokenSequenceGuard implements TokenGuard {
 	private Collection<? extends TokenGuard> sequence;
 	
 	public TokenSequenceGuard(Collection<? extends TokenGuard> sequence) {
-		this.sequence = sequence;
+		Set<Integer> checkedPositions = new HashSet<>();
+		this.sequence = sequence.stream()
+			.flatMap(g -> {
+				if (g instanceof TokenSequenceGuard) {
+					return ((TokenSequenceGuard) g).sequence.stream()
+						.filter(s -> !s.getPositions().stream()
+							.allMatch(checkedPositions::contains))
+						.peek(s -> checkedPositions.addAll(s.getPositions()));
+				} else {
+					Set<Integer> positions = g.getPositions();
+					if (positions.stream()
+							.allMatch(checkedPositions::contains)
+					) {
+						return Stream.empty();
+					} else {
+						checkedPositions.addAll(positions);
+						return Stream.of(g);
+					}
+				}
+			}).collect(StreamUtils.collectToLinkedHashSet());
+	}
+	
+	public TokenSequenceGuard(TokenGuard ...guards) {
+		this(Arrays.asList(guards));
 	}
 	
 	@Override
@@ -67,4 +95,12 @@ public class TokenSequenceGuard implements TokenGuard {
 			.collect(Collectors.joining("\n")) +
 			")\n";
 	}
+	
+	@Override
+	public Set<Integer> getPositions() {
+		return sequence.stream()
+			.map(TokenGuard::getPositions)
+			.flatMap(Set::stream)
+			.collect(Collectors.toSet());
+	}
 }
@@ -58,4 +58,8 @@ public class EofToken implements Token {
 		return null;
 	}
 	
+	@Override
+	public int getPosition() {
+		return position;
+	}
 }
@@ -66,4 +66,8 @@ public class KeywordToken implements Token {
 		return keyword;
 	}
 	
+	@Override
+	public int getPosition() {
+		return position;
+	}
 }
@@ -68,4 +68,9 @@ public class TerminalRuleToken implements Token {
 	public AbstractElement getElement() {
 		return call;
 	}
+	
+	@Override
+	public int getPosition() {
+		return position;
+	}
 }
@@ -53,4 +53,6 @@ public interface Token {
 
 		throw new NotATokenException(element.eClass().getName());
 	}
+	
+	int getPosition();
 }
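Taken together, the new getPositions() implementations behave differently for the two composite guards shown above: TokenSequenceGuard reports the union of the positions its members check, while AlternativeTokenSequenceGuard reports only the positions checked by every alternative, since only those are guaranteed to be covered whichever alternative applies. A condensed illustration of that difference, using plain position sets instead of the real guard classes (names here are hypothetical):

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Illustration only: plain position sets instead of TokenGuard objects.
public class GuardPositionsSketch {

	// Analogous to TokenSequenceGuard.getPositions(): union of member positions.
	static Set<Integer> sequencePositions(List<Set<Integer>> members) {
		Set<Integer> union = new HashSet<>();
		members.forEach(union::addAll);
		return union;
	}

	// Analogous to AlternativeTokenSequenceGuard.getPositions(): positions common
	// to all alternatives; an empty input yields an empty set, as in the diff.
	static Set<Integer> alternativePositions(List<Set<Integer>> alternatives) {
		if (alternatives.isEmpty()) {
			return new HashSet<>();
		}
		Set<Integer> common = new HashSet<>(alternatives.get(0));
		alternatives.forEach(common::retainAll);
		return common;
	}

	public static void main(String[] args) {
		List<Set<Integer>> guards = Arrays.asList(
			new HashSet<>(Arrays.asList(1, 2)),
			new HashSet<>(Arrays.asList(1, 3)));
		System.out.println(sequencePositions(guards));    // [1, 2, 3]
		System.out.println(alternativePositions(guards)); // [1]
	}
}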