Mirror of https://github.com/sigmasternchen/xtext-core (synced 2025-03-15 08:18:55 +00:00)
changed token limit for tree rebuild

This is necessary because otherwise the identity analysis might not be able to detect whether paths are identical up to the token limit (which should cause an error).
parent 8fac71dda9
commit edbc920247

2 changed files with 4 additions and 3 deletions
@@ -238,7 +238,7 @@ public class HoistingProcessor {
         // all paths are trivial
 
         // if there is a terminal on all branches set has terminal to true
-        // else we need might need to consider the following tokens in the path
+        // else we might need to consider the following tokens in the path
         if (guards.stream().allMatch(HoistingGuard::hasTerminal)) {
             return HoistingGuard.terminal();
         } else {

@@ -500,7 +500,7 @@ public class TokenAnalysis {
         tokenCombinations(indexList -> {
             log.info("current index list: " + indexList);
 
-            // no context analysis
+            // no context analysis // TODO why?
             List<List<Token>> tokenListsForPath = getTokenPaths(element, indexList, false);
             List<List<Token>> tokenListForContext = getTokenPathsContextOnly(element, indexList);
 

@@ -572,7 +572,8 @@ public class TokenAnalysis {
     }
 
     public List<List<AbstractElement>> getAllPossiblePaths(AbstractElement path) {
-        return getTokenPaths(path, new TokenAnalysisPaths(range(0, config.getTokenLimit() + 1)), false, false, true)
+        // token limit + 2 so identity analysis will recognize paths that are identical up to the token limit on the flattened tree
+        return getTokenPaths(path, new TokenAnalysisPaths(range(0, config.getTokenLimit() + 2)), false, false, true)
             .getTokenPaths()
             .stream()
             .map(l -> l.stream()
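The reasoning in the commit message can be illustrated with a minimal, self-contained sketch. This is plain Java written for this note, not the project's TokenAnalysis or TokenAnalysisPaths code; the class name, method name, and token values are made up. The point it demonstrates: an equality-based identity check over collected token paths can only tell "genuinely identical" apart from "identical up to the token limit but diverging afterwards" if the paths were collected with at least one position beyond the limit.

import java.util.List;

// Minimal sketch for this commit's reasoning; NOT the actual xtext-core
// TokenAnalysis / TokenAnalysisPaths implementation.
public class IdentityWindowSketch {

    // Compare two collected token paths position by position over `window` positions.
    // Missing positions are treated as null, i.e. "path already ended".
    static boolean identicalWithin(List<String> a, List<String> b, int window) {
        for (int i = 0; i < window; i++) {
            String ta = i < a.size() ? a.get(i) : null;
            String tb = i < b.size() ? b.get(i) : null;
            if (ta == null ? tb != null : !ta.equals(tb)) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        int tokenLimit = 2;

        // Two paths that agree on the first tokenLimit tokens but diverge afterwards.
        List<String> pathA = List.of("a", "b", "c");
        List<String> pathB = List.of("a", "b", "d");

        // If paths are only collected up to the token limit, they look fully
        // identical, so an identity analysis could treat them as the same path
        // instead of flagging "identical up to the token limit" as an error.
        System.out.println(identicalWithin(pathA, pathB, tokenLimit));     // true

        // With at least one extra collected position, the divergence right after
        // the limit becomes visible and the two cases can be told apart.
        System.out.println(identicalWithin(pathA, pathB, tokenLimit + 1)); // false
    }
}

Exactly how far the collected range must extend beyond the old bound depends on how the project's range helper treats its upper end; the commit settles on getTokenLimit() + 2, and the new comment in getAllPossiblePaths documents the intent.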