fixed problem with unordered groups and non-trivial cardinalities in nested prefix alternatives analysis

now flattenPaths() considers following repetitions

new problem: unordered groups and non-trivial cardinalities without
non-optional elements cause an explosion of generated alternatives, which
in turn causes the identity analysis to go out of control
example: S: ('a'? | 'b'?)+
with a token limit of 10 the nested prefix alternatives analysis would
generate over 1300 alternatives
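(rough estimate, not the exact number the analysis reports: each loop iteration
can contribute 'a', 'b' or nothing, so with a token limit of 10 there are up to
2^1 + 2^2 + ... + 2^10 = 2046 distinct non-empty token sequences to consider;
merging duplicates reduces that, but the count still grows exponentially with
the token limit)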

current quick fix: hard limit on the number of generated alternatives
overflowerror 2021-12-23 18:13:52 +01:00
parent 36f790b37b
commit ebef309674
8 changed files with 320 additions and 89 deletions

View file

@ -14,6 +14,7 @@ import org.eclipse.xtext.AbstractRule;
import org.eclipse.xtext.Alternatives;
import org.eclipse.xtext.Grammar;
import org.eclipse.xtext.Group;
import org.eclipse.xtext.Keyword;
import org.eclipse.xtext.UnorderedGroup;
import org.eclipse.xtext.XtextStandaloneSetup;
import org.eclipse.xtext.resource.XtextResource;
@ -434,7 +435,7 @@ public class HoistingProcessorTest extends AbstractXtextTests {
assertEquals("((" + getSyntaxForKeywordToken("a", 1) + " || (p0)) && (" + getSyntaxForKeywordToken("b", 1) + " || (p1)))", guard.render());
}
@Test(expected = TokenAnalysisAbortedException.class)
@Test
public void testUnorderedGroupWithEmptyPathsWithoutContext_expectTokenAnalysisAbortedException() throws Exception {
// @formatter:off
String model =
@ -936,6 +937,44 @@ public class HoistingProcessorTest extends AbstractXtextTests {
);
}
@Test
public void testNestedAlternativesWithIdenticalPrefix_parentElementShouldNotBeChanged_expectContextCheckInResult() throws Exception {
// @formatter:off
String model =
MODEL_PREAMBLE +
"S: {S} $$ p0 $$?=> ('a')? \n" +
" | {S} $$ p1 $$?=> ('b')? ;\n";
// @formatter:on
XtextResource resource = getResourceFromString(model);
Grammar grammar = ((Grammar) resource.getContents().get(0));
hoistingProcessor.init(grammar);
AbstractRule rule = getRule(grammar, "S");
HoistingGuard guard = hoistingProcessor.findHoistingGuard(rule.getAlternatives());
assertFalse(guard.isTrivial());
assertTrue(guard.hasTerminal());
assertEquals(
"(" +
"(" + getSyntaxForEofToken(1) + " || (p0) || (p1)) && " +
"(" + getSyntaxForKeywordToken("a", 1) + " || (p0)) && " +
"(" + getSyntaxForKeywordToken("b", 1) + " || (p1))" +
")",
guard.render()
);
// check sizes of groups and cardinalities of keywords
Alternatives alternatives = (Alternatives) rule.getAlternatives();
assertEquals(2, alternatives.getElements().size());
Group group = (Group) alternatives.getElements().get(0);
assertEquals(3, group.getElements().size());
Keyword keyword = (Keyword) group.getElements().get(2);
assertEquals("?", keyword.getCardinality());
group = (Group) alternatives.getElements().get(1);
assertEquals(3, group.getElements().size());
keyword = (Keyword) group.getElements().get(2);
assertEquals("?", keyword.getCardinality());
}
// symbolic analysis not yet implemented
//@Test
public void testAlternativesIdenticalPathsWithSymbolicAnalysis() throws Exception {

View file

@ -16,11 +16,14 @@ import java.util.LinkedList;
import java.util.Map;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.log4j.Logger;
import static org.eclipse.emf.ecore.util.EcoreUtil.*;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.eclipse.xtext.AbstractElement;
import org.eclipse.xtext.AbstractRule;
import org.eclipse.xtext.AbstractSemanticPredicate;
@ -28,6 +31,7 @@ import org.eclipse.xtext.Action;
import org.eclipse.xtext.Alternatives;
import org.eclipse.xtext.Assignment;
import org.eclipse.xtext.CompoundElement;
import org.eclipse.xtext.EcoreUtil2;
import org.eclipse.xtext.Grammar;
import org.eclipse.xtext.Group;
import org.eclipse.xtext.JavaAction;
@ -41,7 +45,6 @@ import static org.eclipse.xtext.GrammarUtil.*;
import org.eclipse.xtext.xtext.generator.parser.antlr.JavaCodeUtils;
import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.exceptions.NestedPrefixAlternativesException;
import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.exceptions.OptionalCardinalityWithoutContextException;
import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.exceptions.TokenAnalysisAbortedException;
import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.exceptions.UnsupportedConstructException;
import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.guards.AlternativeTokenSequenceGuard;
@ -125,13 +128,53 @@ public class HoistingProcessor {
return virtualNopJavaAction;
}
private CompoundElement flattenPaths(CompoundElement original, List<AbstractElement> paths, List<? extends HoistingGuard> guards) {
// work on copy of original to preserve eParent
CompoundElement flattened = copy(original);
private CompoundElement flattenPaths(AbstractElement original, List<AbstractElement> paths, List<? extends HoistingGuard> guards) {
// we need to preserve parent for context analysis
// but we are not allowed to change the tree
// -> clone parent
EObject clonedParent = copy(original.eContainer());
CompoundElement flattened = XtextFactory.eINSTANCE.createAlternatives();
if (clonedParent instanceof CompoundElement) {
setElementsOfCompoundElement((CompoundElement) clonedParent,
((CompoundElement) clonedParent).getElements().stream()
.map(e -> {
if (e == original) {
return flattened;
} else {
return e;
}
}));
} else {
EcoreUtil2.setEParent(flattened, clonedParent);
}
Stream<AbstractElement> pathsStream = paths.stream();
if (original instanceof UnorderedGroup) {
// in case original element is UnorderedGroup add original after each path
// so token analysis can fetch all possible paths correctly
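// e.g. for an unordered group (A & B) a path A becomes the virtual group ( A (A & B) ),
// so the tokens of the remaining elements and repetitions still follow the path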
pathsStream = pathsStream
.map(p -> {
Group virtualGroup = XtextFactory.eINSTANCE.createGroup();
setElementsOfCompoundElement(virtualGroup, Arrays.asList(p, copy(original)));
return virtualGroup;
});
}
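// expand each path into all token paths it can produce (within the token limit)
// and pair every expansion with the guard of the alternative it came from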
setElementsOfCompoundElement(flattened,
StreamUtils.zip(
paths.stream()
.map(analysis::getAllPossiblePaths),
pathsStream
.map(analysis::getAllPossiblePaths)
.map(l1 -> l1.stream()
.map(l2 -> l2.stream()
.map(EcoreUtil::copy)
// remove cardinality; token analysis already handled that
.peek(e -> e.setCardinality(null))
.collect(Collectors.toList())
).collect(Collectors.toList())
),
guards.stream()
.map(HoistingGuard.class::cast),
(List<List<AbstractElement>> e, HoistingGuard g) -> Tuples.pair(e, g)
@ -179,6 +222,8 @@ public class HoistingProcessor {
return flattened;
}
boolean hasSeen = false;
private HoistingGuard findGuardForAlternatives(CompoundElement alternatives, AbstractRule currentRule) {
log.info("find guard for alternative");
@ -249,12 +294,20 @@ public class HoistingProcessor {
// -> flatten paths to alternative and try again
// this is very inefficient
log.warn("nested prefix alternatives detected");
log.warn("avoid these since they can't be handled efficiency");
log.warn("avoid these since they can't be handled efficiently");
log.info(abstractElementToString(alternatives));
CompoundElement flattened = flattenPaths(alternatives, paths, guards);
log.info(abstractElementToString(flattened));
log.info(flattened.getElements().size());
// TODO: value configurable?
if (flattened.getElements().size() > 100) {
throw new NestedPrefixAlternativesException("nested prefix alternatives can't be analysed because of too many paths");
}
//throw new RuntimeException();
return findGuardForAlternatives(flattened, currentRule);
} catch(TokenAnalysisAbortedException e) {
throw new TokenAnalysisAbortedException(e.getMessage(), e, currentRule);
@ -285,9 +338,7 @@ public class HoistingProcessor {
AbstractElement element = iterator.next();
String cardinality = element.getCardinality();
if (cardinality == null ||
cardinality.equals("") ||
cardinality.equals("+")) {
if (isTrivialCardinality(element) || isOneOrMoreCardinality(element)) {
HoistingGuard guard = findGuardForElementWithTrivialCardinality(element, currentRule);
groupGuard.add(guard);
@ -300,18 +351,12 @@ public class HoistingProcessor {
groupGuard.setHasTerminal();
break;
}
} else if (cardinality.equals("?") ||
cardinality.equals("*")) {
} else if (isOptionalCardinality(element)) {
// though not technically necessary, rewrite tree so that context checks are not needed
// rewrite cardinality to alternatives
// A? B -> A B | B
// A* B -> A+ B | B -> A B (see above)
// we need a clone of the element because we need to set the cardinality without changing the
// original syntax tree
AbstractElement clonedElement = copy(element);
clonedElement.setCardinality(null);
// A* B -> A+ B | B -> A B | B (see above)
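// e.g. the group ( 'a'? 'b' 'c' ) is analysed as the alternatives ( 'b' 'c' | 'a' 'b' 'c' )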
// make copy of every element because we can't use any element twice
List<AbstractElement> remainingElementsInGroup = StreamUtils.fromIterator(iterator)
@ -319,33 +364,50 @@ public class HoistingProcessor {
if (remainingElementsInGroup.isEmpty()) {
// B is empty
// context is needed to generate virtual alternatives
throw new OptionalCardinalityWithoutContextException("no context in group", currentRule);
// context will be taken from parent element
HoistingGuard guard = findGuardForOptionalCardinalityWithoutContext(element, currentRule);
groupGuard.add(guard);
if (guard.hasTerminal()) {
groupGuard.setHasTerminal();
}
// there are no following elements
break;
} else {
// B is non-empty
// -> construct context for alternatives
// we need a clone of the element because we need to set the cardinality without changing the
// original syntax tree
AbstractElement clonedElement = copy(element);
clonedElement.setCardinality(null);
// make copy of first branch and add the cloned element
List<AbstractElement> remainingElementsInGroupIncludingCurrent = new LinkedList<>(remainingElementsInGroup);
remainingElementsInGroupIncludingCurrent.add(0, clonedElement);
Group virtualPathRemaining = XtextFactory.eINSTANCE.createGroup();
setElementsOfCompoundElement(virtualPathRemaining, remainingElementsInGroup);
Group virtualPathRemainingPlusCurrent = XtextFactory.eINSTANCE.createGroup();
setElementsOfCompoundElement(virtualPathRemainingPlusCurrent, remainingElementsInGroupIncludingCurrent);
Alternatives virtualAlternatives = XtextFactory.eINSTANCE.createAlternatives();
setElementsOfCompoundElement(virtualAlternatives, Arrays.asList(virtualPathRemaining, virtualPathRemainingPlusCurrent));
// get Guard for virtual alternatives
HoistingGuard guard = findGuardForElementWithTrivialCardinality(virtualAlternatives, currentRule);
groupGuard.add(guard);
if (guard.hasTerminal()) {
groupGuard.setHasTerminal();
}
// following elements are included in alternative, no need to check further
break;
}
// make copy of first branch and add the cloned element
List<AbstractElement> remainingElementsInGroupIncludingCurrent = new LinkedList<>(remainingElementsInGroup);
remainingElementsInGroupIncludingCurrent.add(0, clonedElement);
Group virtualPathRemaining = XtextFactory.eINSTANCE.createGroup();
setElementsOfCompoundElement(virtualPathRemaining, remainingElementsInGroup);
Group virtualPathRemainingPlusCurrent = XtextFactory.eINSTANCE.createGroup();
setElementsOfCompoundElement(virtualPathRemainingPlusCurrent, remainingElementsInGroupIncludingCurrent);
Alternatives virtualAlternatives = XtextFactory.eINSTANCE.createAlternatives();
setElementsOfCompoundElement(virtualAlternatives, Arrays.asList(virtualPathRemaining, virtualPathRemainingPlusCurrent));
// get Guard for virtual alternatives
HoistingGuard guard = findGuardForElementWithTrivialCardinality(virtualAlternatives, currentRule);
groupGuard.add(guard);
if (guard.hasTerminal()) {
groupGuard.setHasTerminal();
}
// following elements are included in alternative, no need to check further
break;
} else {
throw new IllegalArgumentException("unknown cardinality: " + cardinality);
}

View file

@ -141,7 +141,7 @@ public class TokenAnalysis {
}
private TokenAnalysisPaths getTokenPathsContext(AbstractElement last, TokenAnalysisPaths prefix) {
private TokenAnalysisPaths getTokenPathsContext(AbstractElement last, TokenAnalysisPaths prefix, boolean shortcutEndlessLoops) {
log.info("get context for: " + abstractElementToShortString(last));
List<AbstractElement> context = getNextElementsInContext(last);
@ -160,13 +160,13 @@ public class TokenAnalysis {
for (AbstractElement element : context) {
TokenAnalysisPaths path = new TokenAnalysisPaths(prefix);
path = getTokenPaths(element, path, false, false);
path = getTokenPaths(element, path, false, false, shortcutEndlessLoops);
if (!path.isDone() && element != null) {
if (path.getMinPosition() == currentPosition) {
throw new TokenAnalysisAbortedException("no progress in context analysis");
}
path = getTokenPathsContext(element, path);
path = getTokenPathsContext(element, path, shortcutEndlessLoops);
}
if (path.isDone()) {
result = result.merge(path);
@ -179,11 +179,11 @@ public class TokenAnalysis {
return result;
}
private TokenAnalysisPaths getTokenPathsTrivial(Group path, TokenAnalysisPaths prefix) {
private TokenAnalysisPaths getTokenPathsTrivial(Group path, TokenAnalysisPaths prefix, boolean shortcutEndlessLoops) {
TokenAnalysisPaths result = new TokenAnalysisPaths(prefix);
for(AbstractElement element : path.getElements()) {
result = getTokenPaths(element, result, false, false);
result = getTokenPaths(element, result, false, false, shortcutEndlessLoops);
if (result.isDone()) {
break;
}
@ -191,16 +191,18 @@ public class TokenAnalysis {
return result;
}
private TokenAnalysisPaths getTokenPathsTrivial(Alternatives path, TokenAnalysisPaths prefix) {
private TokenAnalysisPaths getTokenPathsTrivial(Alternatives path, TokenAnalysisPaths prefix, boolean shortcutEndlessLoops) {
TokenAnalysisPaths result = TokenAnalysisPaths.empty(prefix);
for(AbstractElement element : path.getElements()) {
result = result.merge(getTokenPaths(element, prefix, false, false));
result = result.merge(getTokenPaths(element, prefix, false, false, shortcutEndlessLoops));
}
return result;
}
private TokenAnalysisPaths getTokenPathsTrivial(UnorderedGroup path, TokenAnalysisPaths prefix) {
private TokenAnalysisPaths getTokenPathsTrivial(UnorderedGroup path, TokenAnalysisPaths prefix, boolean shortcutEndlessLoops) {
log.info("unordered group");
TokenAnalysisPaths result;
TokenAnalysisPaths current;
@ -213,45 +215,65 @@ public class TokenAnalysis {
int currentPosition = result.getMinPosition();
do {
log.info("unordered group loop");
current = TokenAnalysisPaths.empty(result);
current.resetProgress();
for (AbstractElement element : path.getElements()) {
current = current.merge(getTokenPaths(element, result, false, false));
log.info(abstractElementToShortString(element));
log.info(current.hasProgress() + " - " + current.getSize());
current = current.merge(getTokenPaths(element, result, false, false, shortcutEndlessLoops));
log.info(current.hasProgress() + " - " + current.getSize());
}
result.resetProgress();
result = result.merge(current);
if (current.getMinPosition() == currentPosition) {
throw new TokenAnalysisAbortedException("no progress in loop");
// endless loop
// result will never be done since the shortest path makes no progress
if (shortcutEndlessLoops) {
if (!result.hasProgress()) {
// no progress
// abort endless loop
break;
} else {
// there is still some progress done
continue;
}
} else {
throw new TokenAnalysisAbortedException("no progress in loop");
}
}
result = result.merge(current);
} while(!current.isDone());
return result;
}
private TokenAnalysisPaths getTokenPathsTrivial(AbstractElement path, TokenAnalysisPaths prefix) {
private TokenAnalysisPaths getTokenPathsTrivial(AbstractElement path, TokenAnalysisPaths prefix, boolean shortcutEndlessLoops) {
return new XtextSwitch<TokenAnalysisPaths>() {
@Override
public TokenAnalysisPaths caseGroup(Group group) {
return getTokenPathsTrivial(group, prefix);
return getTokenPathsTrivial(group, prefix, shortcutEndlessLoops);
};
@Override
public TokenAnalysisPaths caseAlternatives(Alternatives alternatives) {
return getTokenPathsTrivial(alternatives, prefix);
return getTokenPathsTrivial(alternatives, prefix, shortcutEndlessLoops);
};
@Override
public TokenAnalysisPaths caseUnorderedGroup(UnorderedGroup unorderedGroup) {
return getTokenPathsTrivial(unorderedGroup, prefix);
return getTokenPathsTrivial(unorderedGroup, prefix, shortcutEndlessLoops);
};
@Override
public TokenAnalysisPaths caseAssignment(Assignment assignment) {
return getTokenPaths(assignment.getTerminal(), prefix, false, false);
return getTokenPaths(assignment.getTerminal(), prefix, false, false, shortcutEndlessLoops);
};
@Override
public TokenAnalysisPaths caseRuleCall(RuleCall call) {
if (isParserRuleCall(call) ||
isEnumRuleCall(call)
) {
return getTokenPaths(call.getRule().getAlternatives(), prefix, false, false);
return getTokenPaths(call.getRule().getAlternatives(), prefix, false, false, shortcutEndlessLoops);
} else {
// go to default case
return null;
@ -274,7 +296,9 @@ public class TokenAnalysis {
}
// analyseContext implies needsLength
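// shortcutEndlessLoops: instead of aborting when a loop stops making progress towards
// the shortest path, stop expanding it and keep the paths found so far
// (used by getAllPossiblePaths() via flattenPaths())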
private TokenAnalysisPaths getTokenPaths(AbstractElement path, TokenAnalysisPaths prefix, boolean analyseContext, boolean needsLength) {
private TokenAnalysisPaths getTokenPaths(
AbstractElement path, TokenAnalysisPaths prefix, boolean analyseContext, boolean needsLength, boolean shortcutEndlessLoops
) {
if (prefix.isDone()) {
return prefix;
}
@ -290,7 +314,7 @@ public class TokenAnalysis {
if (isOptionalCardinality(path)) {
if (analyseContext) {
result = getTokenPathsContext(path, prefix);
result = getTokenPathsContext(path, prefix, shortcutEndlessLoops);
} else if (needsLength) {
throw new TokenAnalysisAbortedException("token expected but path is optional");
} else {
@ -302,16 +326,17 @@ public class TokenAnalysis {
boolean loop = isMultipleCardinality(path);
int currentPosition = result.getMinPosition();
int currentMinPosition = result.getMinPosition();
do {
TokenAnalysisPaths tokenPaths = getTokenPathsTrivial(path, result);
result.resetProgress();
TokenAnalysisPaths tokenPaths = getTokenPathsTrivial(path, result, shortcutEndlessLoops);
if (tokenPaths.isDone()) {
result = result.merge(tokenPaths);
break;
} else if (analyseContext) {
tokenPaths = getTokenPathsContext(path, tokenPaths);
tokenPaths = getTokenPathsContext(path, tokenPaths, shortcutEndlessLoops);
result = result.merge(tokenPaths);
} else if (needsLength) {
throw new TokenAnalysisAbortedException("requested length not satisfiable");
@ -320,10 +345,23 @@ public class TokenAnalysis {
}
if (loop) {
if (result.getMinPosition() == currentPosition) {
throw new TokenAnalysisAbortedException("no progress in loop");
if (result.getMinPosition() == currentMinPosition) {
// endless loop
// result will never be done since the shortest path makes no progress
if (shortcutEndlessLoops) {
if (!result.hasProgress()) {
// no progress
// abort endless loop
break;
} else {
// there is still some progress done
continue;
}
} else {
throw new TokenAnalysisAbortedException("no progress in loop");
}
} else {
currentPosition = result.getMinPosition();
currentMinPosition = result.getMinPosition();
}
}
} while (loop);
@ -332,11 +370,11 @@ public class TokenAnalysis {
}
private List<List<Token>> getTokenPaths(AbstractElement path, List<Integer> indexes, boolean analyseContext) throws TokenAnalysisAbortedException {
return getTokenPaths(path, new TokenAnalysisPaths(indexes), analyseContext, true).getTokenPaths();
return getTokenPaths(path, new TokenAnalysisPaths(indexes), analyseContext, true, false).getTokenPaths();
}
private List<List<Token>> getTokenPathsContextOnly(AbstractElement path, List<Integer> indexes) {
return getTokenPathsContext(path, new TokenAnalysisPaths(indexes)).getTokenPaths();
return getTokenPathsContext(path, new TokenAnalysisPaths(indexes), false).getTokenPaths();
}
private boolean arePathsIdenticalSymbolic(AbstractElement path1, AbstractElement path2) throws SymbolicAnalysisFailedException {
@ -364,8 +402,8 @@ public class TokenAnalysis {
List<Integer> range = range(0, i);
// there shouldn't be a TokenAnalysisAbortedException if needsLength is false
tokenPaths1 = getTokenPaths(path1, new TokenAnalysisPaths(range), false, false);
tokenPaths2 = getTokenPaths(path2, new TokenAnalysisPaths(range), false, false);
tokenPaths1 = getTokenPaths(path1, new TokenAnalysisPaths(range), false, false, false);
tokenPaths2 = getTokenPaths(path2, new TokenAnalysisPaths(range), false, false, false);
Set<Set<Token>> tokenListSet1 = tokenPaths1.getTokenPaths().stream().map(HashSet::new).collect(Collectors.toSet());
Set<Set<Token>> tokenListSet2 = tokenPaths2.getTokenPaths().stream().map(HashSet::new).collect(Collectors.toSet());
@ -530,7 +568,7 @@ public class TokenAnalysis {
}
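// shortcutEndlessLoops is enabled here since flattenPaths() only needs the paths that are
// reachable within the token limit; loops without progress are cut short instead of aborting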
public List<List<AbstractElement>> getAllPossiblePaths(AbstractElement path) {
return getTokenPaths(path, new TokenAnalysisPaths(range(0, config.getTokenLimit() + 1)), false, false)
return getTokenPaths(path, new TokenAnalysisPaths(range(0, config.getTokenLimit() + 1)), false, false, true)
.getTokenPaths()
.stream()
.map(l -> l.stream()

View file

@ -49,15 +49,19 @@ public class TokenAnalysisPath {
}
}
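// returns true if a token was actually added to this path, i.e. the path made progress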
public void add(AbstractElement element) {
public boolean add(AbstractElement element) {
if (isDone())
return;
return false;
if (remainingIndexes.get(0) <= 0) {
path.add(Token.fromElement(element, position));
remainingIndexes.remove(0);
shift();
return true;
} else {
shift();
return false;
}
shift();
}
public List<Token> getTokenPath() {
@ -70,4 +74,33 @@ public class TokenAnalysisPath {
path.stream().map(Token::toString).collect(Collectors.joining(", ")) + "),\n (" +
remainingIndexes.stream().map(Object::toString).collect(Collectors.joining(", ")) + ")\n )";
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((path == null) ? 0 : path.hashCode());
result = prime * result + position;
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
TokenAnalysisPath other = (TokenAnalysisPath) obj;
if (path == null) {
if (other.path != null)
return false;
} else if (!path.equals(other.path))
return false;
if (position != other.position)
return false;
return true;
}
}

View file

@ -8,19 +8,21 @@
*******************************************************************************/
package org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.pathAnalysis;
import java.util.LinkedList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.stream.Collectors;
import org.eclipse.xtext.AbstractElement;
import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.token.Token;
import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.utils.StreamUtils;
/**
* @author overflow - Initial contribution and API
*/
public class TokenAnalysisPaths {
private List<TokenAnalysisPath> tokenPaths = new LinkedList<>();
private LinkedHashSet<TokenAnalysisPath> tokenPaths = new LinkedHashSet<>();
private boolean isEmpty = false;
private boolean hasProgress = false;
public List<List<Token>> getTokenPaths() {
return tokenPaths.stream()
@ -36,23 +38,34 @@ public class TokenAnalysisPaths {
public TokenAnalysisPaths(TokenAnalysisPaths prefix) {
this.tokenPaths = prefix.tokenPaths.stream()
.map(TokenAnalysisPath::new)
.collect(Collectors.toList());
.collect(StreamUtils.collectToLinkedHashSet());
this.hasProgress = prefix.hasProgress;
}
public boolean isDone() {
return !isEmpty && tokenPaths.stream().allMatch(TokenAnalysisPath::isDone);
}
public boolean hasProgress() {
return hasProgress;
}
public void resetProgress() {
hasProgress = false;
}
public void add(AbstractElement element) {
tokenPaths.forEach(p -> p.add(element));
tokenPaths.forEach(p -> hasProgress = p.add(element) || hasProgress);
}
public TokenAnalysisPaths merge(TokenAnalysisPaths other) {
if (isEmpty) {
return other;
} else {
// TODO: implement hashCode and equals to check for duplicates right away
this.tokenPaths.addAll(other.tokenPaths);
// set hasProgress if other has progress and progress is merged
if (this.tokenPaths.addAll(other.tokenPaths)) {
this.hasProgress |= other.hasProgress;
}
return this;
}
}
@ -71,6 +84,10 @@ public class TokenAnalysisPaths {
return tokenPaths.stream().map(TokenAnalysisPath::getPosition).mapToInt(Integer::intValue).max().getAsInt();
}
public int getSize() {
return tokenPaths.size();
}
@Override
public String toString() {
if (isEmpty) {

View file

@ -26,6 +26,7 @@ import org.eclipse.xtext.JavaAction;
import org.eclipse.xtext.Keyword;
import org.eclipse.xtext.ParserRule;
import org.eclipse.xtext.RuleCall;
import org.eclipse.xtext.UnorderedGroup;
import org.eclipse.xtext.util.XtextSwitch;
import com.google.common.base.Strings;
@ -66,6 +67,16 @@ public class DebugUtils {
return true;
};
@Override
public Boolean caseUnorderedGroup(UnorderedGroup object) {
builder.append("Unordered Group (\n");
object.getElements().forEach(e -> {
abstractElementToString(e, builder, indentation + 1, calledRules);
});
builder.append(indentationString);
builder.append(")");
return true;
};
@Override
public Boolean caseRuleCall(RuleCall object) {
AbstractRule rule = object.getRule();
if (rule instanceof ParserRule) {
@ -139,6 +150,13 @@ public class DebugUtils {
return true;
};
@Override
public Boolean caseUnorderedGroup(UnorderedGroup object) {
builder.append("UnorderedGroup (");
builder.append(object.getElements().size());
builder.append(")");
return true;
};
@Override
public Boolean caseRuleCall(RuleCall object) {
AbstractRule rule = object.getRule();
if (rule instanceof ParserRule) {

View file

@ -9,14 +9,15 @@
package org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.utils;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.function.BiFunction;
import java.util.function.Supplier;
import java.util.stream.Collector;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import com.google.common.base.Splitter;
/**
* @author overflow - Initial contribution and API
*/
@ -43,4 +44,14 @@ public class StreamUtils {
public static <A> Stream<A> fromIterator(Iterator<A> iterator) {
return StreamSupport.stream(Spliterators.spliteratorUnknownSize(iterator, Spliterator.NONNULL), false);
}
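// collects stream elements into a LinkedHashSet, dropping duplicates while preserving encounter order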
public static <T> Collector<T, ?, LinkedHashSet<T>> collectToLinkedHashSet() {
return Collector.of(
(Supplier<LinkedHashSet<T>>) LinkedHashSet::new,
LinkedHashSet::add,
(a, b) -> {
a.addAll(b);
return a;
}, Collector.Characteristics.IDENTITY_FINISH);
}
}

View file

@ -26,6 +26,7 @@ import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.log4j.Logger;
import org.eclipse.core.runtime.OperationCanceledException;
@ -835,4 +836,16 @@ public class EcoreUtil2 extends EcoreUtil {
return "";
}
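// attaches child to parent via the single containment reference that can hold it;
// throws if there is no unique matching reference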
public static void setEParent(EObject child, EObject parent) {
List<EReference> possibleReferences = parent.eClass().getEAllContainments().stream()
.filter(EReference::isContainment)
.filter(r -> r.getEReferenceType().isInstance(child))
.collect(Collectors.toList());
if (possibleReferences.size() != 1) {
throw new IllegalArgumentException("no obvious containment reference found");
}
parent.eSet(possibleReferences.get(0), child);
}
}