mirror of https://github.com/sigmasternchen/xtext-core
synced 2025-03-15 08:18:55 +00:00
optimized console output

console output is now hidden behind a debug flag

parent 0e26c5fccf
commit 76f580ad0c

4 changed files with 43 additions and 71 deletions
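Every change below follows the same pattern: an unconditional log.info(...) or System.out.println(...) call is either deleted outright or wrapped in a check of the generator's debug flag. A minimal sketch of that guard, using java.util.logging and a hypothetical Config class standing in for the HoistingConfiguration object behind config.isDebug() in the real classes:

import java.util.logging.Logger;

public class DebugGatedLogging {

    // hypothetical stand-in for HoistingConfiguration; only the flag matters here
    static final class Config {
        private final boolean debug;
        Config(boolean debug) { this.debug = debug; }
        boolean isDebug() { return debug; }
    }

    private static final Logger log = Logger.getLogger(DebugGatedLogging.class.getName());
    private final Config config;

    DebugGatedLogging(Config config) { this.config = config; }

    void findGuardForAlternatives() {
        // before: log.info("find guard for alternative"); ran on every invocation
        // after: the message is produced only when the debug flag is set
        if (config.isDebug())
            log.info("find guard for alternative");
        // ... the analysis itself is unchanged ...
    }

    public static void main(String[] args) {
        new DebugGatedLogging(new Config(false)).findGuardForAlternatives(); // silent
        new DebugGatedLogging(new Config(true)).findGuardForAlternatives();  // logs
    }
}

A side benefit at sites like log.info("paths:" + paths) is that the string concatenation sits inside the guarded statement, so its cost is also skipped in normal runs.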
@@ -72,9 +72,7 @@ class HoistingGeneratorBenchmark extends AbstractXtextTests {
 		val injector = Guice.createInjector(new DefaultGeneratorModule)
 		val inMem = new InMemFSA
 		val options = new AntlrOptions
-		System.out.println("production grammar")
 		injector.getInstance(AntlrDebugProductionGrammarGenerator).generate(grammar, options, inMem)
-		System.out.println("content assist grammar");
 		injector.getInstance(AntlrDebugContentAssistGrammarGenerator).generate(grammar, options, inMem)
 	}
 
@@ -123,9 +123,7 @@ public class HoistingGeneratorBenchmark extends AbstractXtextTests {
       final Injector injector = Guice.createInjector(_defaultGeneratorModule);
       final HoistingGeneratorBenchmark.InMemFSA inMem = new HoistingGeneratorBenchmark.InMemFSA();
       final AntlrOptions options = new AntlrOptions();
-      System.out.println("production grammar");
       injector.<AntlrDebugProductionGrammarGenerator>getInstance(AntlrDebugProductionGrammarGenerator.class).generate(grammar, options, inMem);
-      System.out.println("content assist grammar");
       injector.<AntlrDebugContentAssistGrammarGenerator>getInstance(AntlrDebugContentAssistGrammarGenerator.class).generate(grammar, options, inMem);
     } catch (Throwable _e) {
       throw Exceptions.sneakyThrow(_e);
 
@@ -226,7 +226,8 @@ public class HoistingProcessor {
 	boolean hasSeen = false;
 
 	private HoistingGuard findGuardForAlternatives(CompoundElement alternatives, AbstractRule currentRule, boolean skipCache) {
-		log.info("find guard for alternative");
+		if (config.isDebug())
+			log.info("find guard for alternative");
 
 		List<AbstractElement> paths = new ArrayList<>(alternatives.getElements());
 		List<MergedPathGuard> guards = paths.stream()
@@ -246,7 +247,8 @@ public class HoistingProcessor {
 			}
 		}
 
-		log.info("path identity check");
+		if (config.isDebug())
+			log.info("path identity check");
 		int size = paths.size();
 		try {
 			for (int i = 0; i < size; i++) {
@@ -265,15 +267,17 @@ public class HoistingProcessor {
 				throw new TokenAnalysisAbortedException(e.getMessage(), e, currentRule);
 			}
 
-		log.info("paths:" + paths);
+		if (config.isDebug())
+			log.info("paths:" + paths);
 
-		log.info("minimal path difference");
+		if (config.isDebug())
+			log.info("minimal path difference");
 
 		// if all paths are empty the above step will eliminate all paths
 		// -> size = 1
 		if (size > 1) {
 			try {
-				AlternativesGuard result = StreamUtils.zip(
+				return StreamUtils.zip(
 					analysis.findMinimalPathDifference(paths).stream()
 						.map(a -> a.stream()
 							.map(s -> s.stream()
@@ -281,21 +285,15 @@ public class HoistingProcessor {
 							.collect(Collectors.toList())
 						)
 						.map(TokenSequenceGuard::new)
-						.peek(g -> log.info(g))
 						.map(TokenGuard::reduce)
-						.peek(g -> log.info(g))
 						.collect(Collectors.toList())
 					)
 					.map(AlternativeTokenSequenceGuard::new)
-					.peek(g -> log.info(g))
-					.map(TokenGuard::reduce)
-					.peek(g -> log.info(g)),
+					.map(TokenGuard::reduce),
 					guards.stream(),
 					(TokenGuard tokenGuard, MergedPathGuard pathGuard) -> Tuples.pair(tokenGuard, pathGuard)
 				).map(p -> new PathGuard(p.getFirst(), p.getSecond()))
 				.collect(AlternativesGuard.collector());
-				log.info(result);
-				return result;
 			} catch(NestedPrefixAlternativesException e) {
 				// nested prefix alternatives
 				// -> flatten paths to alternative and try again
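Besides dropping the .peek debug taps, the two hunks above also remove a local variable that existed only to be logged: the composed guard was bound to result, logged, and returned; now the stream pipeline is returned directly. A sketch of that simplification with plain strings in place of the guard types:

import java.util.stream.Collectors;
import java.util.stream.Stream;

public class DirectReturn {

    // before: the pipeline result was bound to a local purely so it could be logged
    static String guardBefore() {
        String result = Stream.of("a", "b", "c").collect(Collectors.joining("|"));
        // log.info(result);
        return result;
    }

    // after: with the log removed, the local is pointless and the result is returned directly
    static String guardAfter() {
        return Stream.of("a", "b", "c").collect(Collectors.joining("|"));
    }

    public static void main(String[] args) {
        System.out.println(guardBefore().equals(guardAfter())); // true
    }
}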
@@ -364,7 +362,8 @@ public class HoistingProcessor {
 
 	// TODO: make private
 	public HoistingGuard findGuardForRule(AbstractRule rule) {
-		log.info("finding guard for rule: " + rule.getName());
+		if (config.isDebug())
+			log.info("finding guard for rule: " + rule.getName());
 		return findGuardForElement(rule.getAlternatives(), rule, false);
 	}
 
@@ -453,7 +452,8 @@ public class HoistingProcessor {
 			path = getPathOfElement(element);
 			guard = elementCache.get(path);
 			if (guard != null) {
-				log.info("from cache: " + path);
+				if (config.isDebug())
+					log.info("from cache: " + path);
 				return guard;
 			}
 		}
 
@@ -40,6 +40,7 @@ import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.HoistingConfigura
 import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.exceptions.NestedPrefixAlternativesException;
 import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.exceptions.SymbolicAnalysisFailedException;
 import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.exceptions.TokenAnalysisAbortedException;
+import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.exceptions.UnsupportedConstructException;
 import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.token.Token;
 import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.utils.DebugUtils;
 import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.utils.MutablePrimitiveWrapper;
@@ -97,15 +98,14 @@ public class TokenAnalysis {
 		CompoundElement compoundContainer = (CompoundElement) container;
 
 		if (compoundContainer == null) {
-			log.info("no container");
+			if (config.isDebug())
+				log.info("no container");
 			// no container element; this is last element in a rule definition
 			AbstractRule rule = containingRule(last);
 			List<RuleCall> calls = findAllRuleCalls(grammar, rule).stream()
 					.filter(Predicate.not(visited::contains))
 					.collect(Collectors.toList());
 
-			log.info("current rule: " + (rule == null ? "null" : rule.getName()));
-
 			if (isStartRule(grammar, rule)) {
 				// context is EOF
 				result.add(null);
@@ -118,14 +118,14 @@ public class TokenAnalysis {
 			}
 
 		} else if (compoundContainer instanceof Group) {
-			log.info("group container");
+			if (config.isDebug())
+				log.info("group container");
 
 			List<AbstractElement> elements = compoundContainer.getElements();
 			int index = elements.indexOf(last);
 			if (index < 0) {
-				log.error("context analysis: element not part of compound");
-				log.info(last.eClass().getName());
-				log.info(abstractElementToString(compoundContainer));
+				throw new UnsupportedConstructException("context analysis: element not part of compound: " +
+						last.eClass().getName() + " in " + abstractElementToString(compoundContainer));
 			}
 
 			int size = elements.size();
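This hunk is the one place where the commit changes behavior beyond logging: an element that cannot be located in its compound container used to be reported through log.error/log.info while execution continued with the invalid index; now the same condition aborts context analysis with the newly imported UnsupportedConstructException. A fail-fast sketch with a hypothetical exception class standing in for the real one:

import java.util.Arrays;
import java.util.List;

public class FailFastLookup {

    // hypothetical stand-in for hoisting.exceptions.UnsupportedConstructException
    static final class UnsupportedConstructException extends RuntimeException {
        UnsupportedConstructException(String message) { super(message); }
    }

    static int indexOfOrThrow(List<String> elements, String last) {
        int index = elements.indexOf(last);
        if (index < 0) {
            // before: the inconsistency was only logged and the method carried on;
            // after: it surfaces immediately at the point of failure
            throw new UnsupportedConstructException(
                    "context analysis: element not part of compound: " + last);
        }
        return index;
    }

    public static void main(String[] args) {
        System.out.println(indexOfOrThrow(Arrays.asList("a", "b"), "b")); // 1
    }
}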
@@ -152,12 +152,11 @@ public class TokenAnalysis {
 					result.add(compoundContainer);
 				}
 
-				log.info("last element; container: " + abstractElementToShortString(compoundContainer));
-
 				result.addAll(getNextElementsInContext(compoundContainer, considerCardinalities, visited));
 			}
 		} else if (compoundContainer instanceof UnorderedGroup) {
-			log.info("unordered group container");
+			if (config.isDebug())
+				log.info("unordered group container");
 
 			if (considerCardinalities) {
 				result.addAll(compoundContainer.getElements().stream()
@@ -177,18 +176,20 @@ public class TokenAnalysis {
 	}
 
 	private TokenAnalysisPaths getTokenPathsContext(AbstractElement last, TokenAnalysisPaths prefix, boolean considerCardinalities, Set<AbstractElement> callStack) {
-		log.info("get context for: " + abstractElementToShortString(last) + (considerCardinalities ? " with" : " without") + " cardinalities");
+		if (config.isDebug())
+			log.info("get context for: " + abstractElementToShortString(last) + (considerCardinalities ? " with" : " without") + " cardinalities");
 
 		List<AbstractElement> context = getNextElementsInContext(last, considerCardinalities);
 
-		log.info(context.size());
+		if (config.isDebug())
 			log.info(context.stream().map(DebugUtils::abstractElementToShortString).collect(Collectors.toList()));
 
 		TokenAnalysisPaths result = TokenAnalysisPaths.empty(prefix);
 
 		int actualNumberOfElements = 0;
 		for (AbstractElement element : context) {
-			log.info("context element: " + abstractElementToShortString(element));
+			if (config.isDebug())
+				log.info("context element: " + abstractElementToShortString(element));
 			TokenAnalysisPaths path = new TokenAnalysisPaths(prefix);
 			path.resetProgress();
 			// shortcut endless loops instead of throwing exception
@@ -212,7 +213,8 @@ public class TokenAnalysis {
 			if (path.isDone()) {
 				result = result.merge(path);
 			} else {
-				log.info("context analysis failed");
+				if (config.isDebug())
+					log.info("context analysis failed");
 				throw new TokenAnalysisAbortedException("context analysis failed");
 			}
 			actualNumberOfElements++;
@@ -222,8 +224,9 @@ public class TokenAnalysis {
 			// TODO: is this special case necessary?
 			throw new TokenAnalysisAbortedException("context analysis failed: no context");
 		}
 
-		log.info("done");
+		if (config.isDebug())
+			log.info("done");
 		return result;
 	}
 
@@ -248,9 +251,7 @@ public class TokenAnalysis {
 
 		return result;
 	}
 	private TokenAnalysisPaths getTokenPathsTrivial(UnorderedGroup path, TokenAnalysisPaths prefix, boolean shortcutEndlessLoops) {
-		log.info("unordered group");
-
 		TokenAnalysisPaths result;
 		TokenAnalysisPaths current;
 
@@ -263,15 +264,10 @@ public class TokenAnalysis {
 		int currentPosition = result.getMinPosition();
 
 		do {
-			log.info("unordered group loop");
-
 			current = TokenAnalysisPaths.empty(result);
 			current.resetProgress();
 			for (AbstractElement element : path.getElements()) {
-				log.info(abstractElementToShortString(element));
-				log.info(current.hasProgress() + " - " + current.getSize());
 				current = current.merge(getTokenPaths(element, result, false, false, shortcutEndlessLoops));
-				log.info(current.hasProgress() + " - " + current.getSize());
 			}
 
 			result.resetProgress();
@@ -457,17 +453,11 @@ public class TokenAnalysis {
 		return getTokenPathsContext(path, new TokenAnalysisPaths(indexes)).getTokenPaths();
 	}
 
-	private boolean arePathsIdenticalSymbolic(AbstractElement path1, AbstractElement path2) throws SymbolicAnalysisFailedException {
-		// ignore symbolic analysis for the moment
-		// TODO
-		throw new SymbolicAnalysisFailedException();
-	}
-
 	private List<Integer> range(int i, int j) {
 		return IntStream.rangeClosed(i, j).boxed().collect(Collectors.toList());
 	}
 
-	private boolean arePathsIdenticalFallback(AbstractElement path1, AbstractElement path2) {
+	public boolean arePathsIdentical(AbstractElement path1, AbstractElement path2) {
 		if (config.isDebug()) {
 			log.info("path1: " + abstractElementToString(path1));
 			log.info("path2: " + abstractElementToString(path2));
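The deleted arePathsIdenticalSymbolic was a stub that unconditionally threw SymbolicAnalysisFailedException, so the old public arePathsIdentical (removed further down) always fell through to arePathsIdenticalFallback. Dropping the stub and promoting the fallback to the public method eliminates a try/catch that could only ever take one branch. A sketch of the equivalence with trivial stand-ins:

public class CollapseAlwaysFailingBranch {

    static final class SymbolicAnalysisFailedException extends Exception {}

    static boolean fallback() { return true; } // stand-in for arePathsIdenticalFallback

    // shape before the commit: the symbolic path always threw,
    // so every call ended up in the catch block
    static boolean before() {
        try {
            throw new SymbolicAnalysisFailedException(); // the arePathsIdenticalSymbolic stub
        } catch (SymbolicAnalysisFailedException e) {
            return fallback();
        }
    }

    // shape after: the fallback is the public entry point
    static boolean after() { return fallback(); }

    public static void main(String[] args) {
        System.out.println(before() == after()); // true
    }
}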
@@ -492,18 +482,13 @@ public class TokenAnalysis {
 			int maxPosition1 = tokenPaths1.getMaxPosition();
 			int maxPosition2 = tokenPaths2.getMaxPosition();
 
-			log.info("set1: " + tokenListSet1 + ", " + maxPosition1);
-			log.info("set2: " + tokenListSet2 + ", " + maxPosition2);
-
 			if (!tokenListSet1.equals(tokenListSet2)) {
-				log.info("not identical");
 				return false;
 			}
 
 			if (maxPosition1 < i + 1) {
 				// different max positions would have failed the equals-Operation
 				// if the max position is smaller than i + 1 the end of the path has been reached
-				log.info("identical");
 				return true;
 			}
 		}
@@ -514,14 +499,6 @@ public class TokenAnalysis {
 			throw new TokenAnalysisAbortedException("token limit exhausted while looking for identical paths");
 	}
 
-	public boolean arePathsIdentical(AbstractElement path1, AbstractElement path2) {
-		try {
-			return arePathsIdenticalSymbolic(path1, path2);
-		} catch (SymbolicAnalysisFailedException e) {
-			return arePathsIdenticalFallback(path1, path2);
-		}
-	}
-
 	private void tokenCombinations(Function<List<Integer>, Boolean> callback) {
 		MutablePrimitiveWrapper<Integer> limit = new MutablePrimitiveWrapper<>(config.getTokenLimit());
 
@@ -560,7 +537,9 @@ public class TokenAnalysis {
 				return true;
 			}
 		} catch (TokenAnalysisAbortedException e) {
-			log.info("token combinations: " + e.getMessage());
+			if (config.isDebug())
+				log.info("tokens exhausted: " + e.getMessage());
+
 			// tokens exhausted; abort current prefix
 			// set limit for calling functions so this index is not checked again
 			limit.set(i);
@@ -580,10 +559,7 @@ public class TokenAnalysis {
 
 		MutablePrimitiveWrapper<List<List<Token>>> result = new MutablePrimitiveWrapper<List<List<Token>>>(null);
 
-		log.info("cardinality: " + virtualCardinality);
-
 		tokenCombinations(indexList -> {
-			log.info("current index list: " + indexList);
 
 			// no context analysis // TODO why?
 			List<List<Token>> tokenListsForPath = getTokenPaths(element, virtualCardinality, indexList, false);
@@ -619,15 +595,15 @@ public class TokenAnalysis {
 		}
 
 		tokenCombinations(indexList -> {
-			log.info("current index list: " + indexList);
 
 			// will throw TokenAnalysisAborted if any path is too short
 			List<List<List<Token>>> tokenListsForPaths = paths.stream()
-					//.peek(p -> log.info("next path: " + p))
+					.peek(p -> {
+						if (config.isDebug())
+							log.info("next path: " + p);
+					})
 					.map(p -> getTokenPaths(p, indexList, true))
 					.collect(Collectors.toList());
 
-			log.info("token lists: " + tokenListsForPaths);
-
 			int size = result.size();
 			for (int i = 0; i < size; i++) {
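The final hunk revives a commented-out .peek tap by giving it a body that checks the debug flag, so the pipeline keeps its diagnostic hook without producing output in normal runs. A sketch of conditional logging inside a .peek stage, with a boolean constant standing in for config.isDebug():

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class ConditionalPeek {

    private static final boolean DEBUG = false; // stand-in for config.isDebug()

    public static void main(String[] args) {
        List<Integer> lengths = Stream.of("alpha", "beta")
                .peek(p -> {
                    // the message is built and printed only when debugging is on
                    if (DEBUG)
                        System.out.println("next path: " + p);
                })
                .map(String::length)
                .collect(Collectors.toList());
        System.out.println(lengths); // [5, 4]
    }
}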