Mirror of https://github.com/sigmasternchen/xtext-core
Synced 2025-03-15 08:18:55 +00:00
changed context analysis to a blacklist model

Instead of just ignoring the path when there is no progress in an endless recursion, the offending element is now blacklisted. Now we can reset the considerCardinalities flag after using it.
parent e16e1c2ac3
commit e6d0b4a23b

2 changed files with 42 additions and 18 deletions
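To make the idea concrete before the diff: below is a deliberately simplified, hypothetical sketch of the two strategies. The graph, the String-based nodes, and the BlacklistModelSketch/traverse names are invented for illustration; only the idea of recording dead ends in a blacklist instead of silently dropping the path is taken from this commit (the real code also threads per-branch copies of the sets, as the diff shows).

import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

// Hypothetical miniature of the change: when revisiting a node yields no
// progress, the old model silently ignored the path at that point; the new
// model records the node in a blacklist so it is skipped up front later.
public class BlacklistModelSketch {
    static final Map<String, List<String>> GRAPH = Map.of(
            "root", List.of("a", "b"),
            "a", List.of("a"),   // self-loop: revisiting "a" makes no progress
            "b", List.of());

    static void traverse(String node, Set<String> visited, Set<String> blacklist) {
        for (String next : GRAPH.getOrDefault(node, List.of())) {
            if (blacklist.contains(next))
                continue; // the earlier decision is remembered, not re-derived
            if (visited.contains(next)) {
                blacklist.add(next); // new model: blacklist instead of just ignoring
                continue;
            }
            Set<String> localVisited = new HashSet<>(visited); // per-branch copy
            localVisited.add(next);
            traverse(next, localVisited, blacklist);
        }
    }

    public static void main(String[] args) {
        traverse("root", new HashSet<>(), new HashSet<>());
        System.out.println("terminated despite the cycle");
    }
}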
TokenAnalysis.java

@@ -173,10 +173,16 @@ public class TokenAnalysis {
     }
 
     private TokenAnalysisPaths getTokenPathsContext(AbstractElement last, TokenAnalysisPaths prefix) {
-        return getTokenPathsContext(last, prefix, true, new HashSet<>());
+        return getTokenPathsContext(last, prefix, true, new HashSet<>(), new HashSet<>());
     }
 
-    private TokenAnalysisPaths getTokenPathsContext(AbstractElement last, TokenAnalysisPaths prefix, boolean considerCardinalities, Set<AbstractElement> visitedElements) {
+    private TokenAnalysisPaths getTokenPathsContext(
+            AbstractElement last,
+            TokenAnalysisPaths prefix,
+            boolean considerCardinalities,
+            Set<AbstractElement> visitedElements,
+            Set<AbstractElement> blacklistedElements
+    ) {
         if (config.isDebug())
             log.info("get context for: " + abstractElementToShortString(last) +
                     (considerCardinalities ? " with" : " without") + " cardinalities");
@@ -192,27 +198,35 @@ public class TokenAnalysis {
         for (AbstractElement element : context) {
             if (config.isDebug())
                 log.info("context element: " + abstractElementToShortString(element));
+
+            if (blacklistedElements.contains(element)) {
+                if (config.isDebug())
+                    log.info("blacklisted element");
+
+                continue;
+            }
+
             TokenAnalysisPaths path = new TokenAnalysisPaths(prefix);
             path.resetProgress();
 
-            // shortcut endless loops instead of throwing exception
             path = getTokenPaths(element, path, false, false);
 
             if (!path.isDone() && element != null) {
-                boolean _considerCardinalities = considerCardinalities;
-                if (visitedElements.contains(element) && !path.hasProgress()) {
-                    if (_considerCardinalities) {
-                        _considerCardinalities = false;
-                    } else {
-                        // considerCardinalities is already false
-                        log.info("failed to analyse cardinalities in context");
-                        // ignore this branch
-                        continue;
-                    }
-                }
+                boolean _considerCardinalities = true;
                 Set<AbstractElement> localVisitedElements = new HashSet<>(visitedElements);
-                localVisitedElements.add(element);
-                path = getTokenPathsContext(element, path, _considerCardinalities, localVisitedElements);
+                Set<AbstractElement> localBlacklst = new HashSet<>(blacklistedElements);
+
+                if (visitedElements.contains(element) && !path.hasCandidates()) {
+                    _considerCardinalities = false;
+                    if (!considerCardinalities) {
+                        log.warn("context analysis: analyzing cardinalities failed: blacklisting element");
+                        localBlacklst.add(element);
+                    }
+                } else {
+                    localVisitedElements.add(element);
+                }
+
+                path = getTokenPathsContext(element, path, _considerCardinalities, localVisitedElements, localBlacklst);
             }
             if (path.isDone()) {
                 result = result.merge(path);
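Two details in this hunk are worth spelling out. First, _considerCardinalities now starts at true on every iteration instead of inheriting the caller's flag, which is the "reset after using it" from the commit message. Second, the revisit test changed from !path.hasProgress() to !path.hasCandidates(), and a second failure (the caller already ran without cardinalities) blacklists the element for the recursive call. A self-contained toy that tabulates this decision (logic paraphrased from the diff; the boolean inputs and the DecisionSketch/decide names are invented):

// Toy decision table for the new per-element logic in getTokenPathsContext.
// "revisited" stands for: visitedElements.contains(element) && !path.hasCandidates().
public class DecisionSketch {
    static String decide(boolean revisited, boolean considerCardinalities) {
        if (!revisited)
            return "mark visited, recurse WITH cardinalities";
        if (considerCardinalities)
            return "recurse once more WITHOUT cardinalities";
        return "blacklist element for the recursion, recurse WITHOUT cardinalities";
    }

    public static void main(String[] args) {
        for (boolean revisited : new boolean[] {false, true})
            for (boolean cards : new boolean[] {true, false})
                System.out.printf("revisited=%b considerCardinalities=%b -> %s%n",
                        revisited, cards, decide(revisited, cards));
    }
}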
@@ -576,12 +590,14 @@ public class TokenAnalysis {
         }
 
         tokenCombinations(indexList -> {
+            if (config.isDebug())
+                log.info(indexList);
 
             // will throw TokenAnalysisAborted if any path is too short
             List<List<List<Token>>> tokenListsForPaths = paths.stream()
                     .peek(p -> {
                         if (config.isDebug())
-                            log.info("next path: " + p);
+                            log.info("next path: " + abstractElementToShortString(p));
                     })
                     .map(p -> getTokenPathsContext(p, indexList).getTokenPaths())
                     .collect(Collectors.toList());
TokenAnalysisPaths.java

@@ -22,6 +22,7 @@ public class TokenAnalysisPaths {
     private List<TokenAnalysisPath> tokenPaths = new ArrayList<>(10);
     private boolean isEmpty = false;
    private boolean hasProgress = false;
+    private boolean hasCandidates = false;
 
     public TokenAnalysisPaths(List<Integer> indexes) {
         tokenPaths.add(new TokenAnalysisPath(indexes));
@@ -49,11 +50,17 @@ public class TokenAnalysisPaths {
         return hasProgress;
     }
 
+    public boolean hasCandidates() {
+        return hasCandidates;
+    }
+
     public void resetProgress() {
         hasProgress = false;
+        hasCandidates = false;
     }
 
     public void add(AbstractElement element) {
+        hasCandidates = true;
         tokenPaths.forEach(p -> hasProgress = p.add(element) || hasProgress);
     }
 
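The new flag separates "an element was offered to the paths" from "some path actually accepted it": add() now sets hasCandidates unconditionally, while hasProgress is still only set when an individual path's add() returns true. A minimal illustration (the Path interface and FlagsSketch scaffolding are invented; add(), hasProgress, and hasCandidates mirror the diff):

import java.util.ArrayList;
import java.util.List;

// Toy version of TokenAnalysisPaths: hasCandidates records that something was
// offered via add(), hasProgress that at least one path actually consumed it.
public class FlagsSketch {
    interface Path { boolean add(String element); } // stand-in for TokenAnalysisPath

    private final List<Path> tokenPaths = new ArrayList<>();
    private boolean hasProgress = false;
    private boolean hasCandidates = false;

    void add(String element) {
        hasCandidates = true; // offered, whether or not any path accepts it
        tokenPaths.forEach(p -> hasProgress = p.add(element) || hasProgress);
    }

    public static void main(String[] args) {
        FlagsSketch paths = new FlagsSketch();
        paths.tokenPaths.add(e -> false); // a path that rejects every element
        paths.add("a");
        System.out.println("hasCandidates = " + paths.hasCandidates); // true
        System.out.println("hasProgress   = " + paths.hasProgress);   // false
    }
}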
@@ -78,6 +85,7 @@ public class TokenAnalysisPaths {
         if (addAllDistinct(other)) {
             this.hasProgress |= other.hasProgress;
         }
+        this.hasCandidates |= other.hasCandidates;
         return this;
     }
 }