Mirror of https://github.com/sigmasternchen/xtext-core, synced 2025-03-15 08:18:55 +00:00

Commit 372d0a4a01: basic support for simple prefix paths
Parent: 621f37874f
9 changed files with 354 additions and 151 deletions
@@ -71,6 +71,9 @@ public class HoistingProcessorTest extends AbstractXtextTests {
	private String getSyntaxForTerminalToken(String terminal, int offset) {
		return "input.LA(" + offset + ") != " + terminal;
	}
+	private String getSyntaxForEofToken(int offset) {
+		return "input.LA(" + offset + ") != EOF";
+	}

	@Test
	public void testEmptyRule() throws Exception {
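For orientation, a minimal, self-contained sketch (not part of the commit) of how these two helpers compose the negated lookahead conditions that the assertions below compare against; the token name RULE_ID is a made-up example:

	public class GuardSyntaxSketch {
		// mirrors the helpers in HoistingProcessorTest
		static String getSyntaxForTerminalToken(String terminal, int offset) {
			return "input.LA(" + offset + ") != " + terminal;
		}

		static String getSyntaxForEofToken(int offset) {
			return "input.LA(" + offset + ") != EOF";
		}

		public static void main(String[] args) {
			System.out.println(getSyntaxForTerminalToken("RULE_ID", 1)); // input.LA(1) != RULE_ID
			System.out.println(getSyntaxForEofToken(3));                 // input.LA(3) != EOF
		}
	}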
@@ -81,6 +84,7 @@ public class HoistingProcessorTest extends AbstractXtextTests {
		// @formatter:off
		XtextResource resource = getResourceFromString(model);
		Grammar grammar = ((Grammar) resource.getContents().get(0));
+		hoistingProcessor.init(grammar);
		AbstractRule rule = getRule(grammar, "S");

		HoistingGuard guard = hoistingProcessor.findHoistingGuard(rule.getAlternatives());
The same one-line change — the new hoistingProcessor.init(grammar) call before the guard is computed — is repeated in every other test method of HoistingProcessorTest. The remaining hunks are identical to the one above apart from their positions (in a few of them the result of findHoistingGuard is not assigned because the test expects an exception): @@ -97,6 +101,7 @@, @@ -114,6 +119,7 @@, @@ -131,6 +137,7 @@, @@ -148,6 +155,7 @@, @@ -165,6 +173,7 @@, @@ -182,6 +191,7 @@, @@ -200,6 +210,7 @@, @@ -218,6 +229,7 @@, @@ -234,6 +246,7 @@, @@ -251,6 +264,7 @@, @@ -268,6 +282,7 @@, @@ -285,6 +300,7 @@, @@ -299,6 +315,7 @@, @@ -316,6 +333,7 @@, @@ -332,6 +350,7 @@, @@ -347,6 +366,7 @@, @@ -364,6 +384,7 @@, @@ -380,6 +401,7 @@, @@ -397,6 +419,7 @@, @@ -411,6 +434,7 @@, @@ -428,6 +452,7 @@, @@ -442,6 +467,7 @@, @@ -470,6 +496,7 @@, @@ -488,6 +515,7 @@, @@ -508,6 +536,7 @@, @@ -527,6 +556,7 @@, @@ -547,6 +577,7 @@, @@ -567,6 +598,7 @@, @@ -589,6 +621,7 @@, @@ -606,6 +639,7 @@, @@ -624,6 +658,7 @@, @@ -647,6 +682,7 @@, @@ -670,6 +706,7 @@, and @@ -692,6 +729,7 @@.
@@ -701,8 +739,7 @@ public class HoistingProcessorTest extends AbstractXtextTests {
		assertEquals("((" + getSyntaxForKeywordToken("b", 3) + " || ((p0) && (p2))) && (" + getSyntaxForKeywordToken("c", 3) + " || ((p0) && (p3))) && (" + getSyntaxForKeywordToken("d", 3) + " || (p1)))", guard.render());
	}

-	@Test(expected = TokenAnalysisAbortedException.class)
-	public void testAlternativeEmptyAndNonEmptyPaths_expectTokenAnalysisAbortedException() throws Exception {
+	public void testAlternativeEmptyAndNonEmptyPaths_expectEofCheck() throws Exception {
		// @formatter:off
		String model =
			MODEL_PREAMBLE +

@@ -711,13 +748,14 @@ public class HoistingProcessorTest extends AbstractXtextTests {
		// @formatter:off
		XtextResource resource = getResourceFromString(model);
		Grammar grammar = ((Grammar) resource.getContents().get(0));
+		hoistingProcessor.init(grammar);
		AbstractRule rule = getRule(grammar, "S");

-		hoistingProcessor.findHoistingGuard(rule.getAlternatives());
+		HoistingGuard guard = hoistingProcessor.findHoistingGuard(rule.getAlternatives());
+		assertEquals("((" + getSyntaxForKeywordToken("a", 1) + " || (p0)) && (" + getSyntaxForEofToken(1) + " || (p1)))", guard.render());
	}

-	@Test(expected = TokenAnalysisAbortedException.class)
-	public void testAlternativeWithPrefixPath_expectTokenAnalysisAbortedException() throws Exception {
+	public void testAlternativeWithPrefixPath_expectEofCheck() throws Exception {
		// @formatter:off
		String model =
			MODEL_PREAMBLE +

@@ -726,9 +764,11 @@ public class HoistingProcessorTest extends AbstractXtextTests {
		// @formatter:off
		XtextResource resource = getResourceFromString(model);
		Grammar grammar = ((Grammar) resource.getContents().get(0));
+		hoistingProcessor.init(grammar);
		AbstractRule rule = getRule(grammar, "S");

-		hoistingProcessor.findHoistingGuard(rule.getAlternatives());
+		HoistingGuard guard = hoistingProcessor.findHoistingGuard(rule.getAlternatives());
+		assertEquals("((" + getSyntaxForEofToken(3) + " || (p0)) && (" + getSyntaxForKeywordToken("c", 3) + " || (p1)))", guard.render());
	}

	@Test(expected = TokenAnalysisAbortedException.class)

@@ -742,6 +782,7 @@ public class HoistingProcessorTest extends AbstractXtextTests {
		// @formatter:off
		XtextResource resource = getResourceFromString(model);
		Grammar grammar = ((Grammar) resource.getContents().get(0));
+		hoistingProcessor.init(grammar);
		AbstractRule rule = getRule(grammar, "S");

		hoistingProcessor.findHoistingGuard(rule.getAlternatives());

@@ -759,6 +800,7 @@ public class HoistingProcessorTest extends AbstractXtextTests {
		// @formatter:off
		XtextResource resource = getResourceFromString(model);
		Grammar grammar = ((Grammar) resource.getContents().get(0));
+		hoistingProcessor.init(grammar);
		AbstractRule rule = getRule(grammar, "S");

		HoistingGuard guard = hoistingProcessor.findHoistingGuard(rule.getAlternatives());
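The two renamed tests above no longer expect TokenAnalysisAbortedException; they assert that alternatives whose token paths are empty or prefixes of each other now yield a guard that falls back to an EOF check. A hedged sketch of the shape of the asserted guard, rewritten as plain Java booleans (la1IsA, atEof, p0 and p1 are illustrative stand-ins for the lookahead checks and the hoisted predicates, not names from the commit):

	public class GuardShapeSketch {
		// (input.LA(1) != 'a' || (p0)) && (input.LA(1) != EOF || (p1))
		// i.e. alternative 0 must be enabled when 'a' follows, alternative 1 when the input ends
		static boolean guard(boolean la1IsA, boolean atEof, boolean p0, boolean p1) {
			return (!la1IsA || p0) && (!atEof || p1);
		}

		public static void main(String[] args) {
			System.out.println(guard(true, false, true, false));  // 'a' ahead and p0 holds -> true
			System.out.println(guard(false, true, false, false)); // at EOF but p1 fails -> false
		}
	}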
@@ -73,6 +73,8 @@ abstract class AbstractAntlrGrammarGenerator {
		if (!isCombinedGrammar) {
			fsa.generateFile(grammarNaming.getLexerGrammar(it).grammarFileName, flattened.compileLexer(options))
		}

		init
	}

	protected def isCombinedGrammar() {
@@ -25,6 +25,7 @@ import org.eclipse.xtext.Action;
import org.eclipse.xtext.Alternatives;
import org.eclipse.xtext.Assignment;
import org.eclipse.xtext.CompoundElement;
import org.eclipse.xtext.Grammar;
import org.eclipse.xtext.GrammarUtil;
import org.eclipse.xtext.Group;
import org.eclipse.xtext.JavaAction;

@@ -65,8 +66,8 @@ public class HoistingProcessor {
	private HoistingConfiguration config = new HoistingConfiguration();
	private TokenAnalysis analysis;

-	public HoistingProcessor() {
-		analysis = new TokenAnalysis(config);
+	public void init(Grammar grammar) {
+		analysis = new TokenAnalysis(config, grammar);
	}

	// TODO: handling for TokenAnalysisAbortedException
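With this change the processor is no longer wired up in its constructor; callers hand it the grammar through the new init method before asking for guards. A usage sketch, under the assumptions that the grammar has already been loaded into an XtextResource (as in the tests above) and that a no-argument constructor is still available:

	// sketch only; `resource` is assumed to hold the parsed Xtext grammar
	Grammar grammar = (Grammar) resource.getContents().get(0);
	HoistingProcessor processor = new HoistingProcessor();
	processor.init(grammar);                       // new initialization step introduced by this commit
	AbstractRule rule = GrammarUtil.findRuleForName(grammar, "S");
	HoistingGuard guard = processor.findHoistingGuard(rule.getAlternatives());
	String condition = guard.render();             // lookahead checks combined with hoisted predicates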
@@ -11,6 +11,7 @@ package org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.pathAnalysis;
import static org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.utils.DebugUtils.*;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
@ -20,211 +21,270 @@ import java.util.stream.Collectors;
|
|||
import java.util.stream.IntStream;
|
||||
|
||||
import org.apache.log4j.Logger;
|
||||
import org.eclipse.emf.ecore.EObject;
|
||||
import org.eclipse.xtext.AbstractElement;
|
||||
import org.eclipse.xtext.AbstractSemanticPredicate;
|
||||
import org.eclipse.xtext.Action;
|
||||
import org.eclipse.xtext.AbstractRule;
|
||||
import org.eclipse.xtext.Alternatives;
|
||||
import org.eclipse.xtext.Assignment;
|
||||
import org.eclipse.xtext.CompoundElement;
|
||||
import org.eclipse.xtext.Grammar;
|
||||
import org.eclipse.xtext.GrammarUtil;
|
||||
import org.eclipse.xtext.Group;
|
||||
import org.eclipse.xtext.JavaAction;
|
||||
import org.eclipse.xtext.RuleCall;
|
||||
import org.eclipse.xtext.UnorderedGroup;
|
||||
import org.eclipse.xtext.util.XtextSwitch;
|
||||
import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.HoistingConfiguration;
|
||||
import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.exceptions.SymbolicAnalysisFailedException;
|
||||
import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.exceptions.TokenAnalysisAbortedException;
|
||||
import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.exceptions.UnsupportedConstructException;
|
||||
import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.token.Token;
|
||||
import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.utils.MutablePrimitiveWrapper;
|
||||
|
||||
import static org.eclipse.xtext.GrammarUtil.*;
|
||||
import static org.eclipse.xtext.EcoreUtil2.*;
|
||||
|
||||
/**
 * @author overflow - Initial contribution and API
 */
public class TokenAnalysis {
	private HoistingConfiguration config;
+	private Grammar grammar;

	private Logger log = Logger.getLogger(TokenAnalysis.class);

-	public TokenAnalysis(HoistingConfiguration config) {
+	public TokenAnalysis(HoistingConfiguration config, Grammar grammar) {
		this.config = config;
+		this.grammar = grammar;
	}
private TokenAnalysisPaths getTokenForIndexesAlternatives(CompoundElement path, TokenAnalysisPaths prefix, boolean needsLength) throws TokenAnalysisAbortedException {
|
||||
if (prefix.isDone()) {
|
||||
return prefix;
|
||||
}
|
||||
|
||||
TokenAnalysisPaths result;
|
||||
if (isOptionalCardinality(path)) {
|
||||
result = prefix;
|
||||
if (needsLength) {
|
||||
// analysis is not done but there are no more mandatory tokens
|
||||
throw new TokenAnalysisAbortedException("needed path length not satisfied due to optional cardinality");
|
||||
}
|
||||
} else {
|
||||
result = TokenAnalysisPaths.empty(prefix);
|
||||
}
|
||||
|
||||
boolean loop = isMultipleCardinality(path);
|
||||
|
||||
do {
|
||||
boolean allDone = true;
|
||||
|
||||
for (AbstractElement element : path.getElements()) {
|
||||
TokenAnalysisPaths current = new TokenAnalysisPaths(prefix);
|
||||
current = getTokenForIndexes(element, current, needsLength); // will check for needsLength
|
||||
|
||||
if (!current.isDone()) {
|
||||
allDone = false;
|
||||
private CompoundElement getCompoundContainer(AbstractElement element) {
|
||||
if (element instanceof CompoundElement) {
|
||||
// get container of compoundElement since getContainerOfType
|
||||
// would return the same element
|
||||
EObject tmp = element.eContainer();
|
||||
while (!(tmp instanceof AbstractElement)) {
|
||||
if (tmp == null) {
|
||||
return null;
|
||||
}
|
||||
tmp = tmp.eContainer();
|
||||
}
|
||||
element = (AbstractElement) tmp;
|
||||
}
|
||||
return getContainerOfType(element, CompoundElement.class);
|
||||
}
|
||||
|
||||
result = result.merge(current);
|
||||
private List<AbstractElement> getNextElementsInContext(AbstractElement last) {
|
||||
|
||||
CompoundElement container = getCompoundContainer(last);
|
||||
while (container instanceof Alternatives) {
|
||||
// skip alternatives since they have to be covered separately
|
||||
last = container;
|
||||
container = getCompoundContainer(last);
|
||||
}
|
||||
|
||||
if (container instanceof UnorderedGroup) {
|
||||
List<AbstractElement> result = new ArrayList<>();
|
||||
result.addAll(container.getElements());
|
||||
result.addAll(getNextElementsInContext(container));
|
||||
return result;
|
||||
} else if (container instanceof Group) {
|
||||
List<AbstractElement> elements = container.getElements();
|
||||
int index = elements.indexOf(last);
|
||||
log.info(index);
|
||||
if (index < elements.size() - 1) {
|
||||
return Arrays.asList(elements.get(index + 1));
|
||||
} else {
|
||||
// this is the last element
|
||||
return getNextElementsInContext(container);
|
||||
}
|
||||
} else if (container == null) {
|
||||
// end of rule
|
||||
AbstractRule rule = containingRule(last);
|
||||
List<RuleCall> calls = findAllRuleCalls(grammar, rule);
|
||||
|
||||
if (calls.isEmpty()) {
|
||||
// has to be start rule
|
||||
// context is EOF
|
||||
return Arrays.asList((AbstractElement) null);
|
||||
}
|
||||
|
||||
if (allDone) {
|
||||
List<AbstractElement> result = new ArrayList<>();
|
||||
for (RuleCall call : calls) {
|
||||
result.addAll(getNextElementsInContext(call));
|
||||
}
|
||||
|
||||
return result;
|
||||
} else {
|
||||
throw new IllegalArgumentException("unknown compound element: " + container.eClass().getName());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private TokenAnalysisPaths getTokenPathsContext(AbstractElement last, TokenAnalysisPaths prefix) {
|
||||
List<AbstractElement> context = getNextElementsInContext(last);
|
||||
|
||||
TokenAnalysisPaths result = TokenAnalysisPaths.empty(prefix);
|
||||
|
||||
if (context.isEmpty()) {
|
||||
// TODO: is this special case necessary?
|
||||
throw new TokenAnalysisAbortedException("context analysis failed: no context");
|
||||
}
|
||||
|
||||
for (AbstractElement element : context) {
|
||||
TokenAnalysisPaths path = new TokenAnalysisPaths(prefix);
|
||||
path = getTokenPaths(element, path, false, false);
|
||||
if (!path.isDone()) {
|
||||
path = getTokenPathsContext(element, path);
|
||||
}
|
||||
if (path.isDone()) {
|
||||
result = result.merge(path);
|
||||
} else {
|
||||
throw new TokenAnalysisAbortedException("context analysis failed");
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private TokenAnalysisPaths getTokenPathsTrivial(Group path, TokenAnalysisPaths prefix) {
|
||||
TokenAnalysisPaths result = new TokenAnalysisPaths(prefix);
|
||||
|
||||
for(AbstractElement element : path.getElements()) {
|
||||
result = getTokenPaths(element, result, false, false);
|
||||
if (result.isDone()) {
|
||||
break;
|
||||
}
|
||||
|
||||
prefix = result;
|
||||
|
||||
// repeat until all further extensions of prefix are done
|
||||
} while(loop);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
private TokenAnalysisPaths getTokenPathsTrivial(Alternatives path, TokenAnalysisPaths prefix) {
|
||||
TokenAnalysisPaths result = TokenAnalysisPaths.empty(prefix);
|
||||
|
||||
private TokenAnalysisPaths getTokenForIndexesGroup(Group path, TokenAnalysisPaths prefix, boolean needsLength) throws TokenAnalysisAbortedException {
|
||||
if (prefix.isDone()) {
|
||||
return prefix;
|
||||
for(AbstractElement element : path.getElements()) {
|
||||
result = result.merge(getTokenPaths(element, prefix, false, false));
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
private TokenAnalysisPaths getTokenPathsTrivial(UnorderedGroup path, TokenAnalysisPaths prefix) {
|
||||
TokenAnalysisPaths result;
|
||||
TokenAnalysisPaths current = new TokenAnalysisPaths(prefix);
|
||||
TokenAnalysisPaths current;
|
||||
|
||||
if (isOptionalCardinality(path)) {
|
||||
result = prefix;
|
||||
if (needsLength) {
|
||||
// analysis is not done but there are no more mandatory tokens
|
||||
throw new TokenAnalysisAbortedException("needed path length not satisfied due to optional cardinality");
|
||||
}
|
||||
if (path.getElements().stream().allMatch(GrammarUtil::isOptionalCardinality)) {
|
||||
result = new TokenAnalysisPaths(prefix);
|
||||
} else {
|
||||
result = TokenAnalysisPaths.empty(prefix);
|
||||
}
|
||||
|
||||
boolean loop = isMultipleCardinality(path);
|
||||
|
||||
do {
|
||||
current = TokenAnalysisPaths.empty(result);
|
||||
for (AbstractElement element : path.getElements()) {
|
||||
current = getTokenForIndexes(element, current, false);
|
||||
|
||||
if (current.isDone()) {
|
||||
// no need to look further
|
||||
|
||||
return result.merge(current);
|
||||
}
|
||||
}
|
||||
|
||||
if (needsLength && !current.isDone()) {
|
||||
// analysis is not done but there are no more mandatory tokens
|
||||
throw new TokenAnalysisAbortedException("needed path length not satisfied");
|
||||
current = current.merge(getTokenPaths(element, result, false, false));
|
||||
}
|
||||
|
||||
result = result.merge(current);
|
||||
current = new TokenAnalysisPaths(result);
|
||||
} while(loop);
|
||||
} while(!current.isDone());
|
||||
|
||||
// if cardinality is trivial or ? return result
|
||||
return result;
|
||||
}
|
||||
|
||||
private TokenAnalysisPaths getTokenForIndexesDefault(AbstractElement path, TokenAnalysisPaths prefix, boolean needsLength) throws TokenAnalysisAbortedException {
|
||||
private TokenAnalysisPaths getTokenPathsTrivial(AbstractElement path, TokenAnalysisPaths prefix) {
|
||||
return new XtextSwitch<TokenAnalysisPaths>() {
|
||||
@Override
|
||||
public TokenAnalysisPaths caseGroup(Group group) {
|
||||
return getTokenPathsTrivial(group, prefix);
|
||||
};
|
||||
@Override
|
||||
public TokenAnalysisPaths caseAlternatives(Alternatives alternatives) {
|
||||
return getTokenPathsTrivial(alternatives, prefix);
|
||||
};
|
||||
@Override
|
||||
public TokenAnalysisPaths caseUnorderedGroup(UnorderedGroup unorderedGroup) {
|
||||
return getTokenPathsTrivial(unorderedGroup, prefix);
|
||||
};
|
||||
@Override
|
||||
public TokenAnalysisPaths caseAssignment(Assignment assignment) {
|
||||
return getTokenPaths(assignment.getTerminal(), prefix, false, false);
|
||||
};
|
||||
@Override
|
||||
public TokenAnalysisPaths caseRuleCall(RuleCall call) {
|
||||
if (isParserRuleCall(call) ||
|
||||
isEnumRuleCall(call)
|
||||
) {
|
||||
return getTokenPaths(call.getRule().getAlternatives(), prefix, false, false);
|
||||
} else {
|
||||
// go to default case
|
||||
return null;
|
||||
}
|
||||
};
|
||||
@Override
|
||||
public TokenAnalysisPaths defaultCase(EObject object) {
|
||||
AbstractElement element = (AbstractElement) object;
|
||||
|
||||
if (Token.isToken(element)) {
|
||||
TokenAnalysisPaths result = new TokenAnalysisPaths(prefix);
|
||||
result.add(element);
|
||||
return result;
|
||||
} else {
|
||||
// Actions, Predicates, JavaActions, ...
|
||||
return prefix;
|
||||
}
|
||||
};
|
||||
}.doSwitch(path);
|
||||
}
|
||||
|
||||
// analyseContext implies needsLength
|
||||
private TokenAnalysisPaths getTokenPaths(AbstractElement path, TokenAnalysisPaths prefix, boolean analyseContext, boolean needsLength) {
|
||||
if (prefix.isDone()) {
|
||||
return prefix;
|
||||
}
|
||||
|
||||
TokenAnalysisPaths result;
|
||||
|
||||
if (path == null) {
|
||||
// empty path means EOF
|
||||
result = new TokenAnalysisPaths(prefix);
|
||||
result.add(path);
|
||||
return result;
|
||||
}
|
||||
|
||||
if (isOptionalCardinality(path)) {
|
||||
result = prefix;
|
||||
if (needsLength) {
|
||||
throw new TokenAnalysisAbortedException("needed path length not satisfied due to optional cardinality");
|
||||
if (analyseContext) {
|
||||
result = getTokenPathsContext(path, prefix);
|
||||
} else if (needsLength) {
|
||||
throw new TokenAnalysisAbortedException("token expected but path is optional");
|
||||
} else {
|
||||
result = new TokenAnalysisPaths(prefix);
|
||||
}
|
||||
} else {
|
||||
result = TokenAnalysisPaths.empty(prefix);
|
||||
}
|
||||
|
||||
TokenAnalysisPaths current = new TokenAnalysisPaths(prefix);
|
||||
|
||||
boolean loop = isMultipleCardinality(path);
|
||||
|
||||
do {
|
||||
if (Token.isToken(path)) {
|
||||
current.add(path);
|
||||
} else if (isParserRuleCall(path) ||
|
||||
isEnumRuleCall(path)) {
|
||||
// path doesn't need length, because we're going to check that anyway in this function
|
||||
current = getTokenForIndexes(((RuleCall) path).getRule().getAlternatives(), current, false);
|
||||
} else if (path instanceof Assignment) {
|
||||
current = getTokenForIndexes(((Assignment) path).getTerminal(), current, false);
|
||||
TokenAnalysisPaths tokenPaths = getTokenPathsTrivial(path, result);
|
||||
|
||||
result = result.merge(tokenPaths);
|
||||
|
||||
if (tokenPaths.isDone()) {
|
||||
result = result.merge(tokenPaths);
|
||||
break;
|
||||
} else if (analyseContext) {
|
||||
tokenPaths = getTokenPathsContext(path, tokenPaths);
|
||||
result = result.merge(tokenPaths);
|
||||
} else if (needsLength) {
|
||||
throw new TokenAnalysisAbortedException("requested length not satisfyable");
|
||||
} else {
|
||||
throw new UnsupportedConstructException("unknown element: " + path.eClass().getName());
|
||||
result = result.merge(tokenPaths);
|
||||
}
|
||||
|
||||
// add path to result
|
||||
result = result.merge(current);
|
||||
|
||||
// if current path is done return result
|
||||
// precondition: either !needsLength or result empty
|
||||
// result is only non-empty if ? cardinality
|
||||
// but then needsLength can't be true.
|
||||
if (current.isDone()) {
|
||||
return result;
|
||||
}
|
||||
|
||||
if (needsLength) {
|
||||
throw new TokenAnalysisAbortedException("needed path length not satisfied");
|
||||
}
|
||||
} while(loop);
|
||||
} while (loop);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private TokenAnalysisPaths getTokenForIndexes(AbstractElement path, TokenAnalysisPaths prefix, boolean needsLength) throws TokenAnalysisAbortedException {
|
||||
if (path instanceof Alternatives) {
|
||||
return getTokenForIndexesAlternatives((Alternatives) path, prefix, needsLength);
|
||||
} else if (path instanceof Group) {
|
||||
return getTokenForIndexesGroup((Group) path, prefix, needsLength);
|
||||
} else if (path instanceof UnorderedGroup) {
|
||||
// clone unordered group
|
||||
// set cardinality accordingly
|
||||
// use code for alternatives
|
||||
|
||||
CompoundElement clonedUnorderedGroup = (CompoundElement) cloneAbstractElement(path);
|
||||
if (isOptionalCardinality(path) ||
|
||||
((UnorderedGroup) path).getElements().stream().allMatch(GrammarUtil::isOptionalCardinality)
|
||||
){
|
||||
clonedUnorderedGroup.setCardinality("*");
|
||||
} else {
|
||||
clonedUnorderedGroup.setCardinality("+");
|
||||
}
|
||||
|
||||
// getTokenForIndexesAlternatives only needs a CompoundElement so we can give it
|
||||
// the modified unordered group
|
||||
return getTokenForIndexesAlternatives(clonedUnorderedGroup, prefix, needsLength);
|
||||
} else if (path instanceof Action ||
|
||||
path instanceof AbstractSemanticPredicate ||
|
||||
path instanceof JavaAction
|
||||
) {
|
||||
return prefix;
|
||||
} else {
|
||||
return getTokenForIndexesDefault(path, prefix, needsLength);
|
||||
}
|
||||
}
|
||||
|
||||
private List<List<Token>> getTokenForIndexes(AbstractElement path, List<Integer> indexes) throws TokenAnalysisAbortedException {
|
||||
return getTokenForIndexes(path, new TokenAnalysisPaths(indexes), true).getTokenPaths();
|
||||
private List<List<Token>> getTokenPaths(AbstractElement path, List<Integer> indexes, boolean analyseContext) throws TokenAnalysisAbortedException {
|
||||
return getTokenPaths(path, new TokenAnalysisPaths(indexes), analyseContext, true).getTokenPaths();
|
||||
}
|
||||
|
||||
private boolean arePathsIdenticalSymbolic(AbstractElement path1, AbstractElement path2) throws SymbolicAnalysisFailedException {
|
||||
|
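The large hunk above replaces the old getTokenForIndexes* methods with getTokenPaths* variants and adds a context analysis: when a path ends before enough tokens have been collected, the analysis continues with whatever can follow the current element in the surrounding grammar, treating the end of the start rule as EOF. A simplified restatement of that walk follows; it is a sketch only (it assumes it sits inside TokenAnalysis next to the grammar field and the getCompoundContainer helper shown above, and it uses the findAllRuleCalls helper added to GrammarUtil in this commit), not the actual implementation:

	private List<AbstractElement> nextElementsInContext(AbstractElement last) {
		CompoundElement container = getCompoundContainer(last);   // helper from the hunk above
		while (container instanceof Alternatives) {
			// alternatives are analysed one by one, so step out of them
			last = container;
			container = getCompoundContainer(last);
		}
		if (container instanceof UnorderedGroup) {
			// any element of the unordered group, or whatever follows the group, may come next
			List<AbstractElement> result = new ArrayList<>(container.getElements());
			result.addAll(nextElementsInContext(container));
			return result;
		} else if (container instanceof Group) {
			List<AbstractElement> elements = container.getElements();
			int index = elements.indexOf(last);
			return index < elements.size() - 1
					? Collections.singletonList(elements.get(index + 1))   // the next sibling
					: nextElementsInContext(container);                    // last element: continue behind the group
		} else if (container == null) {
			// end of the rule: continue behind every call site; no call site means start rule, i.e. EOF
			AbstractRule rule = GrammarUtil.containingRule(last);
			List<RuleCall> calls = GrammarUtil.findAllRuleCalls(grammar, rule);
			if (calls.isEmpty()) {
				return Collections.singletonList(null);   // null is the EOF marker, see Token.fromElement
			}
			List<AbstractElement> result = new ArrayList<>();
			for (RuleCall call : calls) {
				result.addAll(nextElementsInContext(call));
			}
			return result;
		}
		throw new IllegalArgumentException("unknown compound element: " + container.eClass().getName());
	}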
@@ -246,13 +306,13 @@ public class TokenAnalysis {
		List<Integer> range = range(0, i);

		try {
-			tokenListSet1 = new HashSet<>(getTokenForIndexes(path1, range));
+			tokenListSet1 = new HashSet<>(getTokenPaths(path1, range, false));
		} catch (TokenAnalysisAbortedException e) {
			tokenListSet1 = null;
		}

		try {
-			tokenListSet2 = new HashSet<>(getTokenForIndexes(path2, range));
+			tokenListSet2 = new HashSet<>(getTokenPaths(path2, range, false));
		} catch (TokenAnalysisAbortedException e) {
			tokenListSet2 = null;
		}
@@ -349,7 +409,7 @@ public class TokenAnalysis {
		// will throw TokenAnalysisAborted if any path is too short
		List<List<List<Token>>> tokenListsForPaths = paths.stream()
			//.peek(p -> log.info("next path: " + p))
-			.map(p -> getTokenForIndexes(p, indexList))
+			.map(p -> getTokenPaths(p, indexList, true))
			.collect(Collectors.toList());

		log.info("token lists: " + tokenListsForPaths);
@@ -0,0 +1,49 @@
/*******************************************************************************
 * Copyright (c) 2021 itemis AG (http://www.itemis.eu) and others.
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0.
 *
 * SPDX-License-Identifier: EPL-2.0
 *******************************************************************************/
package org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.token;

/**
 * @author overflow - Initial contribution and API
 */
public class EofToken implements Token {

	private int position;

	public EofToken(int position) {
		this.position = position;
	}

	@Override
	public String negatedCondition() {
		return "input.LA(" + position + ") != EOF";
	}

	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = prime * result + position;
		return result;
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		EofToken other = (EofToken) obj;
		if (position != other.position)
			return false;
		return true;
	}

}
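EofToken is the piece that lets guards talk about the end of input. A short usage sketch (the position 3 is an example):

	Token eof = new EofToken(3);
	String condition = eof.negatedCondition();           // "input.LA(3) != EOF"
	boolean samePosition = eof.equals(new EofToken(3));   // true: equality depends only on the position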
@@ -14,4 +14,12 @@ package org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.token;
public class NotATokenException extends RuntimeException {
	private static final long serialVersionUID = 643265533068524552L;

+	public NotATokenException() {
+		super();
+	}
+
+	public NotATokenException(String msg) {
+		super(msg);
+	}
}
@@ -22,7 +22,9 @@ public interface Token {
	String negatedCondition();

	static boolean isToken(AbstractElement element) {
-		if (element instanceof Keyword) {
+		if (element == null) {
+			return true;
+		} else if (element instanceof Keyword) {
			return true;
		} else if (element instanceof RuleCall) {
			return (((RuleCall) element).getRule() instanceof TerminalRule);

@@ -34,7 +36,9 @@ public interface Token {
	}

	static Token fromElement(AbstractElement element, int position) {
-		if (element instanceof Keyword) {
+		if (element == null) {
+			return new EofToken(position);
+		} else if (element instanceof Keyword) {
			return new KeywordToken((Keyword) element, position);
		} else if (element instanceof RuleCall) {
			AbstractRule rule = ((RuleCall) element).getRule();

@@ -45,6 +49,6 @@ public interface Token {
			return new KeywordToken(((EnumLiteralDeclaration) element).getLiteral(), position);
		}

-		throw new NotATokenException();
+		throw new NotATokenException(element.eClass().getName());
	}
}
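Together these changes make a null element the canonical end-of-input marker in the token layer. A small sketch of the convention (the position 1 is an example):

	AbstractElement endOfInput = null;                 // what the context analysis yields at the end of the start rule
	boolean isToken = Token.isToken(endOfInput);       // true after this change
	Token token = Token.fromElement(endOfInput, 1);    // an EofToken
	String cond = token.negatedCondition();            // "input.LA(1) != EOF"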
@@ -98,6 +98,7 @@ public abstract class AbstractAntlrGrammarGenerator {
		if (_not) {
			fsa.generateFile(this.getGrammarNaming().getLexerGrammar(it).getGrammarFileName(), this.compileLexer(flattened, options));
		}
+		this._hoistingProcessor.init(it);
	}

	protected boolean isCombinedGrammar() {
@@ -41,6 +41,7 @@ import org.eclipse.xtext.nodemodel.util.NodeModelUtils;
import org.eclipse.xtext.util.Pair;
import org.eclipse.xtext.util.Strings;
import org.eclipse.xtext.util.Tuples;
import org.eclipse.xtext.util.XtextSwitch;
import org.eclipse.xtext.xtext.CurrentTypeFinder;

import com.google.common.base.Function;

@@ -723,4 +724,39 @@ public class GrammarUtil {
	public static void addElementsToCompoundElement(CompoundElement element, Collection<? extends AbstractElement> elements) {
		addElementsToCompoundElement(element, elements.stream());
	}

	private static void findAllRuleCalls(List<RuleCall> calls, AbstractElement element, AbstractRule rule) {
		new XtextSwitch<Boolean>(){
			@Override
			public Boolean caseRuleCall(RuleCall object) {
				if (object.getRule() == rule) {
					calls.add(object);
				}
				return true;
			};
			@Override
			public Boolean caseAssignment(Assignment object) {
				findAllRuleCalls(calls, object.getTerminal(), rule);
				return true;
			};
			@Override
			public Boolean caseCompoundElement(CompoundElement object) {
				for (AbstractElement element : object.getElements()) {
					findAllRuleCalls(calls, element, rule);
				}
				return true;
			};
		}.doSwitch(element);
	}

	public static List<RuleCall> findAllRuleCalls(Grammar grammar, AbstractRule rule) {
		List<AbstractRule> rules = allRules(grammar);
		List<RuleCall> calls = new ArrayList<>();

		for (AbstractRule r : rules) {
			findAllRuleCalls(calls, r.getAlternatives(), rule);
		}

		return calls;
	}
}
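The new findAllRuleCalls overloads are what the context analysis uses to continue behind a rule's call sites. A hedged usage sketch (the rule name "S" and the grammar variable are placeholders):

	AbstractRule rule = GrammarUtil.findRuleForName(grammar, "S");
	List<RuleCall> callSites = GrammarUtil.findAllRuleCalls(grammar, rule);
	if (callSites.isEmpty()) {
		// no call sites: "S" behaves like the start rule, so its follow context is EOF
	}
	for (RuleCall call : callSites) {
		// each call site is a point from which the token analysis can keep collecting context tokens
	}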