From 76f580ad0c9f277baf0e940cea128019cce73f62 Mon Sep 17 00:00:00 2001
From: overflowerror <mail@overflowerror.com>
Date: Fri, 28 Jan 2022 20:38:47 +0100
Subject: [PATCH] optimized console output

console output is now hidden behind the debug flag (config.isDebug());
purely informational log statements are removed entirely.

In TokenAnalysis, an element that is not part of its compound container
now raises an UnsupportedConstructException instead of only being logged,
and the unused symbolic path-identity check is removed.
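
For illustration, the guarded calls now follow roughly this pattern
(a minimal sketch assuming a log4j 1.x Logger; the example class,
constructor and debug field are hypothetical, only the isDebug()-style
guard around log.info() mirrors the actual code):

    import org.apache.log4j.Logger;

    // hypothetical stand-alone example of the debug-guarded logging pattern
    class DebugGuardedLoggingExample {
        private static final Logger log =
                Logger.getLogger(DebugGuardedLoggingExample.class);

        // stand-in for HoistingConfiguration.isDebug()
        private final boolean debug;

        DebugGuardedLoggingExample(boolean debug) {
            this.debug = debug;
        }

        void findGuardForAlternatives() {
            // before: log.info("find guard for alternative");
            // after:  the message is only emitted when the debug flag is set
            if (debug)
                log.info("find guard for alternative");
        }
    }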
---
 .../hoisting/HoistingGeneratorBenchmark.xtend |  2 -
 .../hoisting/HoistingGeneratorBenchmark.java  |  2 -
 .../antlr/hoisting/HoistingProcessor.java     | 28 +++----
 .../hoisting/pathAnalysis/TokenAnalysis.java  | 82 +++++++------------
 4 files changed, 43 insertions(+), 71 deletions(-)

diff --git a/org.eclipse.xtext.tests/src/org/eclipse/xtext/xtext/generator/hoisting/HoistingGeneratorBenchmark.xtend b/org.eclipse.xtext.tests/src/org/eclipse/xtext/xtext/generator/hoisting/HoistingGeneratorBenchmark.xtend
index 1639e7b8c..6a502dbdc 100644
--- a/org.eclipse.xtext.tests/src/org/eclipse/xtext/xtext/generator/hoisting/HoistingGeneratorBenchmark.xtend
+++ b/org.eclipse.xtext.tests/src/org/eclipse/xtext/xtext/generator/hoisting/HoistingGeneratorBenchmark.xtend
@@ -72,9 +72,7 @@ class HoistingGeneratorBenchmark extends AbstractXtextTests {
 		val injector = Guice.createInjector(new DefaultGeneratorModule)
 		val inMem = new InMemFSA
 		val options = new AntlrOptions
-		System.out.println("production grammar")
 		injector.getInstance(AntlrDebugProductionGrammarGenerator).generate(grammar, options, inMem)
-		System.out.println("content assist grammar");
 		injector.getInstance(AntlrDebugContentAssistGrammarGenerator).generate(grammar, options, inMem)
 	}
 	
diff --git a/org.eclipse.xtext.tests/xtend-gen/org/eclipse/xtext/xtext/generator/hoisting/HoistingGeneratorBenchmark.java b/org.eclipse.xtext.tests/xtend-gen/org/eclipse/xtext/xtext/generator/hoisting/HoistingGeneratorBenchmark.java
index 5e6420f3c..359debbd6 100644
--- a/org.eclipse.xtext.tests/xtend-gen/org/eclipse/xtext/xtext/generator/hoisting/HoistingGeneratorBenchmark.java
+++ b/org.eclipse.xtext.tests/xtend-gen/org/eclipse/xtext/xtext/generator/hoisting/HoistingGeneratorBenchmark.java
@@ -123,9 +123,7 @@ public class HoistingGeneratorBenchmark extends AbstractXtextTests {
       final Injector injector = Guice.createInjector(_defaultGeneratorModule);
       final HoistingGeneratorBenchmark.InMemFSA inMem = new HoistingGeneratorBenchmark.InMemFSA();
       final AntlrOptions options = new AntlrOptions();
-      System.out.println("production grammar");
       injector.<AntlrDebugProductionGrammarGenerator>getInstance(AntlrDebugProductionGrammarGenerator.class).generate(grammar, options, inMem);
-      System.out.println("content assist grammar");
       injector.<AntlrDebugContentAssistGrammarGenerator>getInstance(AntlrDebugContentAssistGrammarGenerator.class).generate(grammar, options, inMem);
     } catch (Throwable _e) {
       throw Exceptions.sneakyThrow(_e);
diff --git a/org.eclipse.xtext.xtext.generator/src/org/eclipse/xtext/xtext/generator/parser/antlr/hoisting/HoistingProcessor.java b/org.eclipse.xtext.xtext.generator/src/org/eclipse/xtext/xtext/generator/parser/antlr/hoisting/HoistingProcessor.java
index 88c929ae6..336a6a222 100644
--- a/org.eclipse.xtext.xtext.generator/src/org/eclipse/xtext/xtext/generator/parser/antlr/hoisting/HoistingProcessor.java
+++ b/org.eclipse.xtext.xtext.generator/src/org/eclipse/xtext/xtext/generator/parser/antlr/hoisting/HoistingProcessor.java
@@ -226,7 +226,8 @@ public class HoistingProcessor {
 	boolean hasSeen = false;
 	
 	private HoistingGuard findGuardForAlternatives(CompoundElement alternatives, AbstractRule currentRule, boolean skipCache) {
-		log.info("find guard for alternative");
+		if (config.isDebug())
+			log.info("find guard for alternative");
 		
 		List<AbstractElement> paths = new ArrayList<>(alternatives.getElements());
 		List<MergedPathGuard> guards = paths.stream()
@@ -246,7 +247,8 @@ public class HoistingProcessor {
 			}
 		}
 
-		log.info("path identity check");
+		if (config.isDebug())
+			log.info("path identity check");
 		int size = paths.size();
 		try {
 			for (int i = 0; i < size; i++) {
@@ -265,15 +267,17 @@ public class HoistingProcessor {
 			throw new TokenAnalysisAbortedException(e.getMessage(), e, currentRule);
 		}
 		
-		log.info("paths:" + paths);
+		if (config.isDebug())
+			log.info("paths:" + paths);
 		
-		log.info("minimal path difference");
+		if (config.isDebug())
+			log.info("minimal path difference");
 		
 		// if all paths are empty the above step will eliminate all paths
 		// -> size = 1
 		if (size > 1) {
 			try {
-				AlternativesGuard result = StreamUtils.zip(
+				return StreamUtils.zip(
 					analysis.findMinimalPathDifference(paths).stream()
 						.map(a -> a.stream()
 							.map(s -> s.stream()
@@ -281,21 +285,15 @@ public class HoistingProcessor {
 									.collect(Collectors.toList())
 							)
 							.map(TokenSequenceGuard::new)
-							.peek(g -> log.info(g))
 							.map(TokenGuard::reduce)
-							.peek(g -> log.info(g))
 							.collect(Collectors.toList())
 						)
 						.map(AlternativeTokenSequenceGuard::new)
-						.peek(g -> log.info(g))
-						.map(TokenGuard::reduce)
-						.peek(g -> log.info(g)),
+						.map(TokenGuard::reduce),
 					guards.stream(),
 					(TokenGuard tokenGuard, MergedPathGuard pathGuard) -> Tuples.pair(tokenGuard, pathGuard)
 				).map(p -> new PathGuard(p.getFirst(), p.getSecond()))
 				.collect(AlternativesGuard.collector());
-				log.info(result);
-				return result;
 			} catch(NestedPrefixAlternativesException e) {
 				// nested prefix alternatives
 				// -> flatten paths to alternative and try again
@@ -364,7 +362,8 @@ public class HoistingProcessor {
 	
 	// TODO: make private
 	public HoistingGuard findGuardForRule(AbstractRule rule) {
-		log.info("finding guard for rule: " + rule.getName());
+		if (config.isDebug())
+			log.info("finding guard for rule: " + rule.getName());
 		return findGuardForElement(rule.getAlternatives(), rule, false);
 	}
 	
@@ -453,7 +452,8 @@ public class HoistingProcessor {
 			path = getPathOfElement(element);
 			guard = elementCache.get(path);
 			if (guard != null) {
-				log.info("from cache: " + path);
+				if (config.isDebug())
+					log.info("from cache: " + path);
 				return guard;
 			}
 		}
diff --git a/org.eclipse.xtext.xtext.generator/src/org/eclipse/xtext/xtext/generator/parser/antlr/hoisting/pathAnalysis/TokenAnalysis.java b/org.eclipse.xtext.xtext.generator/src/org/eclipse/xtext/xtext/generator/parser/antlr/hoisting/pathAnalysis/TokenAnalysis.java
index 5c9dbad4e..b899e29cd 100644
--- a/org.eclipse.xtext.xtext.generator/src/org/eclipse/xtext/xtext/generator/parser/antlr/hoisting/pathAnalysis/TokenAnalysis.java
+++ b/org.eclipse.xtext.xtext.generator/src/org/eclipse/xtext/xtext/generator/parser/antlr/hoisting/pathAnalysis/TokenAnalysis.java
@@ -40,6 +40,7 @@ import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.HoistingConfigura
 import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.exceptions.NestedPrefixAlternativesException;
 import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.exceptions.SymbolicAnalysisFailedException;
 import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.exceptions.TokenAnalysisAbortedException;
+import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.exceptions.UnsupportedConstructException;
 import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.token.Token;
 import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.utils.DebugUtils;
 import org.eclipse.xtext.xtext.generator.parser.antlr.hoisting.utils.MutablePrimitiveWrapper;
@@ -97,15 +98,14 @@ public class TokenAnalysis {
 		CompoundElement compoundContainer = (CompoundElement) container;
 		
 		if (compoundContainer == null) {
-			log.info("no container");
+			if (config.isDebug())
+				log.info("no container");
 			// no container element; this is last element in a rule definition
 			AbstractRule rule = containingRule(last);
 			List<RuleCall> calls = findAllRuleCalls(grammar, rule).stream()
 					.filter(Predicate.not(visited::contains))
 					.collect(Collectors.toList());
 			
-			log.info("current rule: " + (rule == null ? "null" : rule.getName()));
-			
 			if (isStartRule(grammar, rule)) {
 				// context is EOF
 				result.add(null);
@@ -118,14 +118,14 @@ public class TokenAnalysis {
 			}
 			
 		} else if (compoundContainer instanceof Group) {
-			log.info("group container");
+			if (config.isDebug())
+				log.info("group container");
 			
 			List<AbstractElement> elements = compoundContainer.getElements();
 			int index = elements.indexOf(last);
 			if (index < 0) {
-				log.error("context analysis: element not part of compound");
-				log.info(last.eClass().getName());
-				log.info(abstractElementToString(compoundContainer));
+				throw new UnsupportedConstructException("context analysis: element not part of compound: " + 
+						last.eClass().getName() + " in " + abstractElementToString(compoundContainer));
 			}
 			
 			int size = elements.size();
@@ -152,12 +152,11 @@ public class TokenAnalysis {
 					result.add(compoundContainer);
 				}
 				
-				log.info("last element; container: " + abstractElementToShortString(compoundContainer));
-				
 				result.addAll(getNextElementsInContext(compoundContainer, considerCardinalities, visited));
 			}
 		} else if (compoundContainer instanceof UnorderedGroup) {
-			log.info("unordered group container");
+			if (config.isDebug())
+				log.info("unordered group container");
 			
 			if (considerCardinalities) {
 				result.addAll(compoundContainer.getElements().stream()
@@ -177,18 +176,20 @@ public class TokenAnalysis {
 	}
 	
 	private TokenAnalysisPaths getTokenPathsContext(AbstractElement last, TokenAnalysisPaths prefix, boolean considerCardinalities, Set<AbstractElement> callStack) {
-		log.info("get context for: " + abstractElementToShortString(last) + (considerCardinalities ? " with" : " without") + " cardinalities");
+		if (config.isDebug())
+			log.info("get context for: " + abstractElementToShortString(last) + (considerCardinalities ? " with" : " without") + " cardinalities");
 		
 		List<AbstractElement> context = getNextElementsInContext(last, considerCardinalities);
 		
-		log.info(context.size());
-		log.info(context.stream().map(DebugUtils::abstractElementToShortString).collect(Collectors.toList()));
+		if (config.isDebug())
+			log.info(context.stream().map(DebugUtils::abstractElementToShortString).collect(Collectors.toList()));
 				
 		TokenAnalysisPaths result = TokenAnalysisPaths.empty(prefix);
 		
 		int actualNumberOfElements = 0;
 		for (AbstractElement element : context) {
-			log.info("context element: " + abstractElementToShortString(element));
+			if (config.isDebug())
+				log.info("context element: " + abstractElementToShortString(element));
 			TokenAnalysisPaths path = new TokenAnalysisPaths(prefix);
 			path.resetProgress();
 			// shortcut endless loops instead of throwing exception
@@ -212,7 +213,8 @@ public class TokenAnalysis {
 			if (path.isDone()) {
 				result = result.merge(path);
 			} else {
-				log.info("context analysis failed");
+				if (config.isDebug())
+					log.info("context analysis failed");
 				throw new TokenAnalysisAbortedException("context analysis failed");
 			}
 			actualNumberOfElements++;
@@ -222,8 +224,9 @@ public class TokenAnalysis {
 			// TODO: is this special case necessary?
 			throw new TokenAnalysisAbortedException("context analysis failed: no context");
 		}
-			
-		log.info("done");
+		
+		if (config.isDebug())
+			log.info("done");
 		return result;
 	}
 	
@@ -248,9 +251,7 @@ public class TokenAnalysis {
 		
 		return result;
 	}
-	private TokenAnalysisPaths getTokenPathsTrivial(UnorderedGroup path, TokenAnalysisPaths prefix, boolean shortcutEndlessLoops) {
-		log.info("unordered group");
-		
+	private TokenAnalysisPaths getTokenPathsTrivial(UnorderedGroup path, TokenAnalysisPaths prefix, boolean shortcutEndlessLoops) {
 		TokenAnalysisPaths result;
 		TokenAnalysisPaths current;
 		
@@ -263,15 +264,10 @@ public class TokenAnalysis {
 		int currentPosition = result.getMinPosition();
 		
 		do {
-			log.info("unordered group loop");
-			
 			current = TokenAnalysisPaths.empty(result);
 			current.resetProgress();
 			for (AbstractElement element : path.getElements()) {
-				log.info(abstractElementToShortString(element));
-				log.info(current.hasProgress() + " - " + current.getSize());
 				current = current.merge(getTokenPaths(element, result, false, false, shortcutEndlessLoops));
-				log.info(current.hasProgress() + " - " + current.getSize());
 			}
 			
 			result.resetProgress();
@@ -457,17 +453,11 @@ public class TokenAnalysis {
 		return getTokenPathsContext(path, new TokenAnalysisPaths(indexes)).getTokenPaths();
 	}
 	
-	private boolean arePathsIdenticalSymbolic(AbstractElement path1, AbstractElement path2) throws SymbolicAnalysisFailedException {
-		// ignore symbolic analysis for the moment
-		// TODO
-		throw new SymbolicAnalysisFailedException(); 
-	}
-	
 	private List<Integer> range(int i, int j) {
 		return IntStream.rangeClosed(i, j).boxed().collect(Collectors.toList());
 	}
 	
-	private boolean arePathsIdenticalFallback(AbstractElement path1, AbstractElement path2) {
+	public boolean arePathsIdentical(AbstractElement path1, AbstractElement path2) {
 		if (config.isDebug()) {
 			log.info("path1: " + abstractElementToString(path1));
 			log.info("path2: " + abstractElementToString(path2));
@@ -492,18 +482,13 @@ public class TokenAnalysis {
 			int maxPosition1 = tokenPaths1.getMaxPosition();
 			int maxPosition2 = tokenPaths2.getMaxPosition();
 			
-			log.info("set1: " + tokenListSet1 + ", " + maxPosition1);
-			log.info("set2: " + tokenListSet2 + ", " + maxPosition2);
-			
 			if (!tokenListSet1.equals(tokenListSet2)) {
-				log.info("not identical");
 				return false;
 			}
 			
 			if (maxPosition1 < i + 1) {
 				// different max positions would have failed the equals-Operation
 				// if the max position is smaller than i + 1 the end of the path has been reached
-				log.info("identical");
 				return true;
 			}
 		}
@@ -514,14 +499,6 @@ public class TokenAnalysis {
 		throw new TokenAnalysisAbortedException("token limit exhausted while looking for identical paths");
 	}
 	
-	public boolean arePathsIdentical(AbstractElement path1, AbstractElement path2) {
-		try {
-			return arePathsIdenticalSymbolic(path1, path2);
-		} catch (SymbolicAnalysisFailedException e) {
-			return arePathsIdenticalFallback(path1, path2);
-		}
-	}
-	
 	private void tokenCombinations(Function<List<Integer>, Boolean> callback) {
 		MutablePrimitiveWrapper<Integer> limit = new MutablePrimitiveWrapper<>(config.getTokenLimit());
 		
@@ -560,7 +537,9 @@ public class TokenAnalysis {
 						return true;
 					}
 				} catch (TokenAnalysisAbortedException e) {
-					log.info("token combinations: " + e.getMessage());
+					if (config.isDebug())
+						log.info("tokens exhausted: " + e.getMessage());
+					
 					// tokens exhausted; abort current prefix
 					// set limit for calling functions so this index is not checked again
 					limit.set(i);
@@ -580,10 +559,7 @@ public class TokenAnalysis {
 		
 		MutablePrimitiveWrapper<List<List<Token>>> result = new MutablePrimitiveWrapper<List<List<Token>>>(null);
 		
-		log.info("cardinality: " + virtualCardinality);
-		
 		tokenCombinations(indexList -> {
-			log.info("current index list: " + indexList);
 			
 			// no context analysis // TODO why?
 			List<List<Token>> tokenListsForPath = getTokenPaths(element, virtualCardinality, indexList, false);
@@ -619,15 +595,15 @@ public class TokenAnalysis {
 		}
 		
 		tokenCombinations(indexList -> {
-			log.info("current index list: " + indexList);
 			
 			// will throw TokenAnalysisAborted if any path is too short
 			List<List<List<Token>>> tokenListsForPaths = paths.stream()
-					//.peek(p -> log.info("next path: " + p))
+					.peek(p -> {
+						if (config.isDebug())
+							log.info("next path: " + p);
+					})
 					.map(p -> getTokenPaths(p, indexList, true))
 					.collect(Collectors.toList());
-
-			log.info("token lists: " + tokenListsForPaths);
 			
 			int size = result.size();
 			for (int i = 0; i < size; i++) {