mirror of
https://github.com/sigmasternchen/xtext-core
synced 2025-03-15 08:18:55 +00:00
Changed context analysis to a blacklist model,
instead of just ignoring the path when an endless recursion makes no progress. Now the considerCardinalities flag can be reset after it has been used.
This commit is contained in:
parent
e16e1c2ac3
commit
e6d0b4a23b
2 changed files with 42 additions and 18 deletions
|
@ -173,10 +173,16 @@ public class TokenAnalysis {
|
|||
}
|
||||
|
||||
private TokenAnalysisPaths getTokenPathsContext(AbstractElement last, TokenAnalysisPaths prefix) {
|
||||
return getTokenPathsContext(last, prefix, true, new HashSet<>());
|
||||
return getTokenPathsContext(last, prefix, true, new HashSet<>(), new HashSet<>());
|
||||
}
|
||||
|
||||
private TokenAnalysisPaths getTokenPathsContext(AbstractElement last, TokenAnalysisPaths prefix, boolean considerCardinalities, Set<AbstractElement> visitedElements) {
|
||||
private TokenAnalysisPaths getTokenPathsContext(
|
||||
AbstractElement last,
|
||||
TokenAnalysisPaths prefix,
|
||||
boolean considerCardinalities,
|
||||
Set<AbstractElement> visitedElements,
|
||||
Set<AbstractElement> blacklistedElements
|
||||
) {
|
||||
if (config.isDebug())
|
||||
log.info("get context for: " + abstractElementToShortString(last) +
|
||||
(considerCardinalities ? " with" : " without") + " cardinalities");
|
||||
|
@ -192,27 +198,35 @@ public class TokenAnalysis {
|
|||
for (AbstractElement element : context) {
|
||||
if (config.isDebug())
|
||||
log.info("context element: " + abstractElementToShortString(element));
|
||||
|
||||
if (blacklistedElements.contains(element)) {
|
||||
if (config.isDebug())
|
||||
log.info("blacklisted element");
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
TokenAnalysisPaths path = new TokenAnalysisPaths(prefix);
|
||||
path.resetProgress();
|
||||
|
||||
// shortcut endless loops instead of throwing exception
|
||||
path = getTokenPaths(element, path, false, false);
|
||||
|
||||
if (!path.isDone() && element != null) {
|
||||
boolean _considerCardinalities = considerCardinalities;
|
||||
if (visitedElements.contains(element) && !path.hasProgress()) {
|
||||
if (_considerCardinalities) {
|
||||
_considerCardinalities = false;
|
||||
} else {
|
||||
// considerCardinalities is already false
|
||||
log.info("failed to analyse cardinalities in context");
|
||||
// ignore this branch
|
||||
continue;
|
||||
}
|
||||
}
|
||||
boolean _considerCardinalities = true;
|
||||
|
||||
Set<AbstractElement> localVisitedElements = new HashSet<>(visitedElements);
|
||||
Set<AbstractElement> localBlacklst = new HashSet<>(blacklistedElements);
|
||||
|
||||
if (visitedElements.contains(element) && !path.hasCandidates()) {
|
||||
_considerCardinalities = false;
|
||||
if (!considerCardinalities) {
|
||||
log.warn("context analysis: analyzing cardinalities failed: blacklisting element");
|
||||
localBlacklst.add(element);
|
||||
}
|
||||
} else {
|
||||
localVisitedElements.add(element);
|
||||
path = getTokenPathsContext(element, path, _considerCardinalities, localVisitedElements);
|
||||
}
|
||||
|
||||
path = getTokenPathsContext(element, path, _considerCardinalities, localVisitedElements, localBlacklst);
|
||||
}
|
||||
if (path.isDone()) {
|
||||
result = result.merge(path);
|
||||
|
@ -576,12 +590,14 @@ public class TokenAnalysis {
|
|||
}
|
||||
|
||||
tokenCombinations(indexList -> {
|
||||
if (config.isDebug())
|
||||
log.info(indexList);
|
||||
|
||||
// will throw TokenAnalysisAborted if any path is too short
|
||||
List<List<List<Token>>> tokenListsForPaths = paths.stream()
|
||||
.peek(p -> {
|
||||
if (config.isDebug())
|
||||
log.info("next path: " + p);
|
||||
log.info("next path: " + abstractElementToShortString(p));
|
||||
})
|
||||
.map(p -> getTokenPathsContext(p, indexList).getTokenPaths())
|
||||
.collect(Collectors.toList());
|
||||
|
|
|
@ -22,6 +22,7 @@ public class TokenAnalysisPaths {
|
|||
private List<TokenAnalysisPath> tokenPaths = new ArrayList<>(10);
|
||||
private boolean isEmpty = false;
|
||||
private boolean hasProgress = false;
|
||||
private boolean hasCandidates = false;
|
||||
|
||||
public TokenAnalysisPaths(List<Integer> indexes) {
|
||||
tokenPaths.add(new TokenAnalysisPath(indexes));
|
||||
|
@ -49,11 +50,17 @@ public class TokenAnalysisPaths {
|
|||
return hasProgress;
|
||||
}
|
||||
|
||||
public boolean hasCandidates() {
|
||||
return hasCandidates;
|
||||
}
|
||||
|
||||
public void resetProgress() {
|
||||
hasProgress = false;
|
||||
hasCandidates = false;
|
||||
}
|
||||
|
||||
public void add(AbstractElement element) {
|
||||
hasCandidates = true;
|
||||
tokenPaths.forEach(p -> hasProgress = p.add(element) || hasProgress);
|
||||
}
|
||||
|
||||
|
@ -78,6 +85,7 @@ public class TokenAnalysisPaths {
|
|||
if (addAllDistinct(other)) {
|
||||
this.hasProgress |= other.hasProgress;
|
||||
}
|
||||
this.hasCandidates |= other.hasCandidates;
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Reference in a new issue