Commit 75892484 authored by Elemer Lelik

Merge pull request #7 from alovassy/master

artf707891 : CfgParser: Storing hidden tokens
parents 9c18126f 1738d7d4
package org.eclipse.titan.common.parsers;
import java.util.List;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.misc.Interval;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.ParseTreeVisitor;
//NOTE: it does not know its start and end tokens; it just contains a string
public class AddedParseTree implements ParseTree {
private String mText;
public List<ParseTree> children;
public AddedParseTree( final String aText ) {
mText = aText;
}
@Override
public Interval getSourceInterval() {
return null;
}
@Override
public int getChildCount() {
return children != null ? children.size() : 0;
}
@Override
public Object getPayload() {
return null;
}
@Override
public String toStringTree() {
return mText;
}
@Override
public <T> T accept(ParseTreeVisitor<? extends T> arg0) {
return null;
}
@Override
public ParseTree getChild( int i ) {
return children != null && i >= 0 && i < children.size() ? children.get( i ) : null;
}
@Override
public ParseTree getParent() {
return null;
}
@Override
public String getText() {
return mText;
}
@Override
public String toStringTree(Parser arg0) {
return mText;
}
public void setText(String aText) {
mText = aText;
}
}
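For illustration only (not part of this patch): a minimal sketch of how a text-only AddedParseTree node might be spliced into an existing ANTLR 4 parse tree. The appendText helper is hypothetical; it only relies on ParserRuleContext.children accepting any ParseTree implementation.

import java.util.ArrayList;

import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.tree.ParseTree;
import org.eclipse.titan.common.parsers.AddedParseTree;

public class AddedParseTreeUsageSketch {
    /** Hypothetical helper: appends a string-only node to a rule's child list. */
    public static ParseTree appendText( final ParserRuleContext aRule, final String aText ) {
        if ( aRule.children == null ) {
            // ParserRuleContext creates its child list lazily, so do the same here
            aRule.children = new ArrayList<ParseTree>();
        }
        final AddedParseTree node = new AddedParseTree( aText );
        aRule.children.add( node );
        return node;
    }
}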
@@ -10,37 +10,55 @@ package org.eclipse.titan.common.parsers;
import java.util.ArrayList;
import java.util.List;
import org.antlr.v4.runtime.CommonToken;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.tree.ParseTree;
//TODO: rename to CfgParseTree, inherit from ParserRuleContext
/**
* @author Kristof Szabados
* @author Arpad Lovassy
*/
public class LocationAST {
private String mText;
private TokenStream mTokenStream;
private ParserRuleContext mRule;
private CommonHiddenStreamToken mHiddenAfter;
private CommonHiddenStreamToken mHiddenBefore;
public LocationAST(final String aText) {
mRule = new ParserRuleContext();
public LocationAST( final String aText ) {
setText( aText );
}
public LocationAST(final ParserRuleContext aRule) {
mRule = aRule;
public LocationAST( final ParserRuleContext aRule, TokenStream aTokenStream ) {
setRule( aRule );
mTokenStream = aTokenStream;
}
public LocationAST( final ParserRuleContext aRule ) {
setRule( aRule );
}
public LocationAST(final Token aToken) {
mRule = new ParserRuleContext();
mRule.addChild(aToken);
public LocationAST( final Token aToken ) {
setToken( aToken );
}
private void setToken( Token aToken ) {
ParserRuleContext rule = new ParserRuleContext();
rule.addChild( aToken );
setRule( rule );
}
private void setRule( ParserRuleContext aRule ) {
mRule = aRule;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append("{ ");
sb.append(mText+", ");
if ( mRule != null ) {
sb.append("{ " + mRule.start + ", ");
sb.append("" + mRule.stop + ", ");
@@ -57,15 +75,13 @@ public class LocationAST {
}
public String getText() {
if ( mText != null ) {
return mText;
}
return mRule != null ? mRule.getText() : null;
String text = mRule != null ? mRule.getText() : null;
return text;
}
public void setText(final String aText) {
mText = aText;
public void setText( final String aText ) {
CommonToken token = new CommonToken( 0, aText );
setToken( token );
}
public ParserRuleContext getRule() {
@@ -75,7 +91,7 @@ public class LocationAST {
public LocationAST getParent() {
final ParserRuleContext parentRule = mRule != null ? mRule.getParent() : null;
return new LocationAST( parentRule );
return new LocationAST( parentRule, mTokenStream );
}
public LocationAST getFirstChild() {
@@ -95,7 +111,7 @@ public class LocationAST {
final ParserRuleContext firstRule = (ParserRuleContext) firstParseTree;
return new LocationAST( firstRule );
return new LocationAST( firstRule, mTokenStream );
}
public void setFirstChild(final LocationAST aNode) {
@@ -131,7 +147,7 @@ public class LocationAST {
final ParserRuleContext nextRule = (ParserRuleContext) nextParseTree;
return new LocationAST( nextRule );
return new LocationAST( nextRule, mTokenStream );
}
@@ -197,10 +213,13 @@ public class LocationAST {
final ParserRuleContext parent = mRule.getParent();
if ( parent == null ) {
// no parent (root node)
return -1;
}
if ( parent.children == null ) {
// this should not happen; program error:
// parent's children list is not filled
return -1;
}
@@ -218,8 +237,16 @@ public class LocationAST {
}
public int getType() {
// TODO: implement
//TODO: implement
return 0;
}
public TokenStream getTokenStream() {
return mTokenStream;
}
public void setTokenStream(TokenStream mTokenStream) {
this.mTokenStream = mTokenStream;
}
}
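Since LocationAST now keeps the TokenStream it was built from, hidden tokens can be looked up again later. A minimal sketch, assuming the stored stream is the CommonTokenStream created in CfgAnalyzer below (CommonTokenStream extends BufferedTokenStream); the hiddenBefore helper is illustrative only.

import java.util.Collections;
import java.util.List;

import org.antlr.v4.runtime.BufferedTokenStream;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.Token;
import org.eclipse.titan.common.parsers.LocationAST;

public class HiddenTokenLookupSketch {
    /** Illustrative only: hidden tokens (whitespace, comments) right before the node's first token. */
    public static List<Token> hiddenBefore( final LocationAST aNode ) {
        final ParserRuleContext rule = aNode.getRule();
        // assumption: the stored stream is the buffered CommonTokenStream built in CfgAnalyzer
        final BufferedTokenStream stream = (BufferedTokenStream) aNode.getTokenStream();
        if ( rule == null || stream == null || rule.start == null ) {
            return Collections.<Token>emptyList();
        }
        final List<Token> hidden =
            stream.getHiddenTokensToLeft( rule.start.getTokenIndex(), Token.HIDDEN_CHANNEL );
        return hidden != null ? hidden : Collections.<Token>emptyList();
    }
}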
@@ -15,9 +15,9 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.antlr.v4.runtime.BufferedTokenStream;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CommonTokenFactory;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.UnbufferedCharStream;
@@ -65,6 +65,7 @@ public final class CfgAnalyzer {
private DefineSectionHandler defineSectionHandler = null;
private LoggingSectionHandler loggingSectionHandler = null;
private ParserRuleContext mParseTreeRoot = null;
private TokenStream mTokenStream = null;
private String mLogFileName = null;
private Integer mTcpPort = null;
private String mLocalAddress = null;
@@ -168,6 +169,10 @@ public final class CfgAnalyzer {
return mParseTreeRoot;
}
public TokenStream getTokenStream() {
return mTokenStream;
}
public List<SyntacticErrorStorage> getErrorStorage() {
if (!lexerListener.getErrorsStored().isEmpty() && parserListener.getErrorsStored().isEmpty()) {
return lexerListener.getErrorsStored();
@@ -225,20 +230,21 @@ public final class CfgAnalyzer {
lexer.removeErrorListeners(); // remove ConsoleErrorListener
lexer.addErrorListener(lexerListener);
// Previously it was UnbufferedTokenStream(lexer), but it was changed to BufferedTokenStream, because UnbufferedTokenStream seems to be unusable. It is an ANTLR 4 bug.
// 1. Previously it was UnbufferedTokenStream(lexer), but it was changed to BufferedTokenStream, because UnbufferedTokenStream seems to be unusable. It is an ANTLR 4 bug.
// Read this: https://groups.google.com/forum/#!topic/antlr-discussion/gsAu-6d3pKU
// pr_PatternChunk[StringBuilder builder, boolean[] uni]:
// $builder.append($v.text); <-- exception is thrown here: java.lang.UnsupportedOperationException: interval 85..85 not in token buffer window: 86..341
final TokenStream tokens = new BufferedTokenStream( lexer );
// 2. Changed from BufferedTokenStream to CommonTokenStream, otherwise tokens with "-> channel(HIDDEN)" are not filtered out in lexer.
final TokenStream tokens = new CommonTokenStream( lexer );
final CfgParser parser = new CfgParser(tokens);
//parse tree is built by default
parserListener = new TitanListener();
parser.removeErrorListeners(); // remove ConsoleErrorListener
parser.addErrorListener(parserListener);
mParseTreeRoot = parser.pr_ConfigFile();
warnings = parser.getWarnings();
//TODO: implement: fill rootInterval if needed
mTokenStream = tokens;
warnings = parser.getWarnings();
definitions = parser.getDefinitions();
final CfgParseResult cfgParseResult = parser.getCfgParseResult();
mExecuteElements = cfgParseResult.getExecuteElements();
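As comment 2 above explains, CommonTokenStream only hands default-channel tokens to the parser while keeping the HIDDEN-channel ones in its buffer. A self-contained sketch of that effect; the input string is made up and the CfgLexer import path is assumed.

import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.Token;
import org.eclipse.titan.common.parsers.cfg.CfgLexer; // assumed package of the generated lexer

public class HiddenChannelDemo {
    public static void main( final String[] args ) {
        // made-up input: a section header followed by a line comment
        final CfgLexer lexer = new CfgLexer( new ANTLRInputStream( "[LOGGING] // a comment\n" ) );
        final CommonTokenStream tokens = new CommonTokenStream( lexer );
        tokens.fill(); // tokenize the whole input
        for ( final Token token : tokens.getTokens() ) {
            // hidden tokens stay in the buffer but are invisible to the parser
            final String channel = token.getChannel() == Token.HIDDEN_CHANNEL ? "hidden" : "default";
            System.out.println( channel + ": " + token.getText() );
        }
    }
}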
@@ -3,13 +3,13 @@ lexer grammar CfgLexer;
@header {}
@members{}
WS: [ \t\r\n\f]+ ->skip;
WS: [ \t\r\n\f]+ -> channel(HIDDEN);
LINE_COMMENT:
(
'//' ~[\r\n]*
| '#' ~[\r\n]*
) ->skip;
) -> channel(HIDDEN);
BLOCK_COMMENT: '/*' .*? '*/' -> channel(HIDDEN);
@@ -41,13 +41,13 @@ COMPONENTS_SECTION1: '[COMPONENTS]'-> type(COMPONENTS_SECTION),mode(COMPONENTS
LOGGING_SECTION1: '[LOGGING]'-> type(LOGGING_SECTION),mode(LOGGING_SECTION_MODE);
PROFILER_SECTION1: '[PROFILER]'-> type(PROFILER_SECTION),mode(PROFILER_SECTION_MODE);
WS1: [ \t\r\n\f]+ ->skip;
WS1: [ \t\r\n\f]+ -> channel(HIDDEN);
LINE_COMMENT1:
(
'//' ~[\r\n]*
| '#' ~[\r\n]*
) ->skip;
BLOCK_COMMENT1: '/*' .*? '*/' -> skip;
) -> channel(HIDDEN);
BLOCK_COMMENT1: '/*' .*? '*/' -> channel(HIDDEN);
fragment DOT1: '.';
SEMICOLON1: ';';
PLUS1: '+';
@@ -124,13 +124,13 @@ COMPONENTS_SECTION2: '[COMPONENTS]'-> type(COMPONENTS_SECTION),mode(COMPONENTS
LOGGING_SECTION2: '[LOGGING]'-> type(LOGGING_SECTION),mode(LOGGING_SECTION_MODE);
PROFILER_SECTION2: '[PROFILER]'-> type(PROFILER_SECTION),mode(PROFILER_SECTION_MODE);
WS2: [ \t\r\n\f]+ ->skip;
WS2: [ \t\r\n\f]+ -> channel(HIDDEN);
LINE_COMMENT2:
(
'//' ~[\r\n]*
| '#' ~[\r\n]*
) ->skip;
BLOCK_COMMENT2: '/*' .*? '*/' -> skip;
) -> channel(HIDDEN);
BLOCK_COMMENT2: '/*' .*? '*/' -> channel(HIDDEN);
STRING2: '"' .*? '"';
@@ -149,13 +149,13 @@ COMPONENTS_SECTION3: '[COMPONENTS]'-> type(COMPONENTS_SECTION),mode(COMPONENTS
LOGGING_SECTION3: '[LOGGING]'-> type(LOGGING_SECTION),mode(LOGGING_SECTION_MODE);
PROFILER_SECTION3: '[PROFILER]'-> type(PROFILER_SECTION),mode(PROFILER_SECTION_MODE);
WS3: [ \t\r\n\f]+ ->skip;
WS3: [ \t\r\n\f]+ -> channel(HIDDEN);
LINE_COMMENT3:
(
'//' ~[\r\n]*
| '#' ~[\r\n]*
) ->skip;
BLOCK_COMMENT3: '/*' .*? '*/' -> skip;
) -> channel(HIDDEN);
BLOCK_COMMENT3: '/*' .*? '*/' -> channel(HIDDEN);
SEMICOLON3: ';';
fragment LETTER3: [A-Z|a-z];
fragment NUMBER3: [0-9];
@@ -177,13 +177,13 @@ COMPONENTS_SECTION4: '[COMPONENTS]'-> type(COMPONENTS_SECTION),mode(COMPONENTS
LOGGING_SECTION4: '[LOGGING]'-> type(LOGGING_SECTION),mode(LOGGING_SECTION_MODE);
PROFILER_SECTION4: '[PROFILER]'-> type(PROFILER_SECTION),mode(PROFILER_SECTION_MODE);
WS4: [ \t\r\n\f]+ ->skip;
WS4: [ \t\r\n\f]+ -> channel(HIDDEN);
LINE_COMMENT4:
(
'//' ~[\r\n]*
| '#' ~[\r\n]*
) ->skip;
BLOCK_COMMENT4: '/*' .*? '*/' -> skip;
) -> channel(HIDDEN);
BLOCK_COMMENT4: '/*' .*? '*/' -> channel(HIDDEN);
STRING4: '"' .*? '"';
// define section
@@ -201,13 +201,13 @@ COMPONENTS_SECTION5: '[COMPONENTS]'-> type(COMPONENTS_SECTION),mode(COMPONENTS
LOGGING_SECTION5: '[LOGGING]'-> type(LOGGING_SECTION),mode(LOGGING_SECTION_MODE);
PROFILER_SECTION5: '[PROFILER]'-> type(PROFILER_SECTION),mode(PROFILER_SECTION_MODE);
WS5: [ \t\r\n\f]+ ->skip;
WS5: [ \t\r\n\f]+ -> channel(HIDDEN);
LINE_COMMENT5:
(
'//' ~[\r\n]*
| '#' ~[\r\n]*
) ->skip;
BLOCK_COMMENT5: '/*' .*? '*/' -> skip;
) -> channel(HIDDEN);
BLOCK_COMMENT5: '/*' .*? '*/' -> channel(HIDDEN);
IPV6_5:
( 'A'..'F' | 'a'..'f' | '0'..'9' )*
':'
@@ -290,13 +290,13 @@ COMPONENTS_SECTION6: '[COMPONENTS]'-> type(COMPONENTS_SECTION),mode(COMPONENTS
LOGGING_SECTION6: '[LOGGING]'-> type(LOGGING_SECTION),mode(LOGGING_SECTION_MODE);
PROFILER_SECTION6: '[PROFILER]'-> type(PROFILER_SECTION),mode(PROFILER_SECTION_MODE);
WS6: [ \t\r\n\f]+ ->skip;
WS6: [ \t\r\n\f]+ -> channel(HIDDEN);
LINE_COMMENT6:
(
'//' ~[\r\n]*
| '#' ~[\r\n]*
) ->skip;
BLOCK_COMMENT6: '/*' .*? '*/' -> skip;
) -> channel(HIDDEN);
BLOCK_COMMENT6: '/*' .*? '*/' -> channel(HIDDEN);
SEMICOLON6: ';';
ASSIGNMENTCHAR6: ':=';
STRING6: '"' .*? '"';
@@ -324,18 +324,18 @@ COMPONENTS_SECTION7: '[COMPONENTS]'-> type(COMPONENTS_SECTION),mode(COMPONENTS
LOGGING_SECTION7: '[LOGGING]'-> type(LOGGING_SECTION),mode(LOGGING_SECTION_MODE);
PROFILER_SECTION7: '[PROFILER]'-> type(PROFILER_SECTION),mode(PROFILER_SECTION_MODE);
WS7: [ \t\r\n\f]+ ->skip;
WS7: [ \t\r\n\f]+ -> channel(HIDDEN);
LINE_COMMENT7:
(
'//' ~[\r\n]*
| '#' ~[\r\n]*
) ->skip;
) -> channel(HIDDEN);
fragment FR_LETTER7: [A-Za-z];
fragment FR_NUMBER7: [0-9];
fragment FR_DOT7: '.';
fragment FR_TTCN3IDENTIFIER7: FR_LETTER7 (FR_LETTER7 | FR_NUMBER7 | '_')*;
TTCN3IDENTIFIER7: FR_LETTER7 (FR_LETTER7 | FR_NUMBER7 | '_')*;
BLOCK_COMMENT7: '/*' .*? '*/' -> skip;
BLOCK_COMMENT7: '/*' .*? '*/' -> channel(HIDDEN);
STAR7: '*';
PLUS7: '+';
MINUS7: '-';
@@ -391,13 +391,13 @@ COMPONENTS_SECTION8: '[COMPONENTS]'-> type(COMPONENTS_SECTION),mode(COMPONENTS
LOGGING_SECTION8: '[LOGGING]'-> type(LOGGING_SECTION),mode(LOGGING_SECTION_MODE);
PROFILER_SECTION8: '[PROFILER]'-> type(PROFILER_SECTION),mode(PROFILER_SECTION_MODE);
WS8: [ \t\r\n\f]+ ->skip;
WS8: [ \t\r\n\f]+ -> channel(HIDDEN);
LINE_COMMENT8:
(
'//' ~[\r\n]*
| '#' ~[\r\n]*
) ->skip;
BLOCK_COMMENT8: '/*' .*? '*/' -> skip;
) -> channel(HIDDEN);
BLOCK_COMMENT8: '/*' .*? '*/' -> channel(HIDDEN);
fragment FR_LETTER8: [A-Za-z];
fragment FR_NUMBER8: [0-9];
fragment FR_DOT8: '.';
@@ -448,13 +448,13 @@ COMPONENTS_SECTION9: '[COMPONENTS]'-> type(COMPONENTS_SECTION),mode(COMPONENTS
LOGGING_SECTION9: '[LOGGING]'-> type(LOGGING_SECTION),mode(LOGGING_SECTION_MODE);
PROFILER_SECTION9: '[PROFILER]'-> type(PROFILER_SECTION),mode(PROFILER_SECTION_MODE);
WS9: [ \t\r\n\f]+ ->skip;
WS9: [ \t\r\n\f]+ -> channel(HIDDEN);
LINE_COMMENT9:
(
'//' ~[\r\n]*
| '#' ~[\r\n]*
) ->skip;
BLOCK_COMMENT9: '/*' .*? '*/' -> skip;
) -> channel(HIDDEN);
BLOCK_COMMENT9: '/*' .*? '*/' -> channel(HIDDEN);
SEMICOLON9: ';';
ASSIGNMENTCHAR9: ':'? '=';
CONCATCHAR9: '&=';
@@ -578,13 +578,13 @@ COMPONENTS_SECTION10: '[COMPONENTS]'-> type(COMPONENTS_SECTION),mode(COMPONENT
LOGGING_SECTION10: '[LOGGING]'-> type(LOGGING_SECTION),mode(LOGGING_SECTION_MODE);
PROFILER_SECTION10: '[PROFILER]'-> type(PROFILER_SECTION),mode(PROFILER_SECTION_MODE);
WS10: [ \t\r\n\f]+ ->skip;
WS10: [ \t\r\n\f]+ -> channel(HIDDEN);
LINE_COMMENT10:
(
'//' ~[\r\n]*
| '#' ~[\r\n]*
) ->skip;
BLOCK_COMMENT10: '/*' .*? '*/' -> skip;
) -> channel(HIDDEN);
BLOCK_COMMENT10: '/*' .*? '*/' -> channel(HIDDEN);
SEMICOLON10: ';';
STAR10: '*';
ASSIGNMENTCHAR10: ':=';
@@ -651,13 +651,13 @@ COMPONENTS_SECTION11: '[COMPONENTS]'-> type(COMPONENTS_SECTION),mode(COMPONENT
LOGGING_SECTION11: '[LOGGING]'-> type(LOGGING_SECTION),mode(LOGGING_SECTION_MODE);
PROFILER_SECTION11: '[PROFILER]'-> type(PROFILER_SECTION),mode(PROFILER_SECTION_MODE);
WS11: [ \t\r\n\f]+ ->skip;
WS11: [ \t\r\n\f]+ -> channel(HIDDEN);
LINE_COMMENT11:
(
'//' ~[\r\n]*
| '#' ~[\r\n]*
) ->skip;
BLOCK_COMMENT11: '/*' .*? '*/' -> skip;
) -> channel(HIDDEN);
BLOCK_COMMENT11: '/*' .*? '*/' -> channel(HIDDEN);
TTCN_EXECUTOR1: 'TTCN_EXECUTOR'; TTCN_ERROR1: 'TTCN_ERROR'; TTCN_WARNING1: 'TTCN_WARNING';
TTCN_PORTEVENT1: 'TTCN_PORTEVENT'; TTCN_TIMEROP1: 'TTCN_TIMEROP'; TTCN_VERDICTOP1: 'TTCN_VERDICTOP';
@@ -801,13 +801,13 @@ COMPONENTS_SECTION12: '[COMPONENTS]'-> type(COMPONENTS_SECTION),mode(COMPONENT
LOGGING_SECTION12: '[LOGGING]'-> type(LOGGING_SECTION),mode(LOGGING_SECTION_MODE);
PROFILER_SECTION12: '[PROFILER]'-> type(PROFILER_SECTION),mode(PROFILER_SECTION_MODE);
WS12: [ \t\r\n\f]+ ->skip;
WS12: [ \t\r\n\f]+ -> channel(HIDDEN);
LINE_COMMENT12:
(
'//' ~[\r\n]*
| '#' ~[\r\n]*
) ->skip;
BLOCK_COMMENT12: '/*' .*? '*/' -> skip;
) -> channel(HIDDEN);
BLOCK_COMMENT12: '/*' .*? '*/' -> channel(HIDDEN);
CONCATCHAR12: '&=';
HEX12: [0-9|A-F|a-f];
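The grammar change is the same in every lexer mode: '-> skip' is replaced by '-> channel(HIDDEN)', because skipped tokens never reach the token stream and can never be printed back, while hidden-channel tokens stay in the buffer. A hedged sketch of the practical difference when reconstructing configuration text (the helper names are illustrative):

import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.TokenStream;

public class TextReconstructionSketch {
    /** With a buffered stream, getText() over the rule's token interval also returns the HIDDEN-channel tokens. */
    public static String withHiddenTokens( final ParserRuleContext aCtx, final TokenStream aTokens ) {
        return aTokens.getText( aCtx.getSourceInterval() );
    }

    /** The rule context itself concatenates only the tokens the parser saw. */
    public static String withoutHiddenTokens( final ParserRuleContext aCtx ) {
        return aCtx.getText();
    }
}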
@@ -3,7 +3,7 @@ parser grammar CfgParser;
@header {
import java.util.HashMap;
import org.eclipse.titan.common.parsers.LocationAST;
import org.antlr.v4.runtime.tree.TerminalNodeImpl;
import org.eclipse.titan.common.parsers.TITANMarker;
import org.eclipse.titan.common.parsers.cfg.indices.ComponentSectionHandler;
import org.eclipse.titan.common.parsers.cfg.indices.DefineSectionHandler;
@@ -353,7 +353,7 @@ pr_IncludeSection returns [ IncludeSection includeSection ]:
{ String fileName = $f.getText().substring( 1, $f.getText().length() - 1 );
$includeSection.addIncludeFileName( fileName );
mIncludeFiles.add( fileName );
includeSectionHandler.getFiles().add( new LocationAST($f) );
includeSectionHandler.getFiles().add( new TerminalNodeImpl( $f ) );
}
)*
;
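Include file names are now stored as plain TerminalNodeImpl nodes instead of LocationAST wrappers. A small sketch of reading a stored entry back, assuming the node wraps the quoted STRING token that pr_IncludeSection adds above:

import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.tree.TerminalNodeImpl;

public class IncludeFileNodeSketch {
    /** Strips the surrounding quotes, mirroring what pr_IncludeSection does for fileName. */
    public static String fileName( final TerminalNodeImpl aNode ) {
        final String quoted = aNode.getText();
        return quoted.substring( 1, quoted.length() - 1 );
    }

    /** 1-based line of the include entry, taken from the wrapped token. */
    public static int line( final TerminalNodeImpl aNode ) {
        final Token token = aNode.getSymbol();
        return token.getLine();
    }
}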
@@ -18,7 +18,6 @@ import org.eclipse.core.resources.IWorkspaceRoot;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.Path;
import org.eclipse.titan.common.logging.ErrorReporter;
import org.eclipse.titan.common.parsers.CommonHiddenStreamToken;
import org.eclipse.titan.common.parsers.LocationAST;
import org.eclipse.titan.common.path.PathConverter;
@@ -27,7 +26,7 @@ import org.eclipse.titan.common.path.PathConverter;
* and is extracting data from them required by the executors (actually the Main Controller)
*
* @author Kristof Szabados
* @author Arpad Lovassy
* @author Arpad Lovassy
*/
public final class ConfigFileHandler {
private static final String ORIGINALLY_FROM = "//This part was originally found in file: ";
@@ -180,20 +179,9 @@ public final class ConfigFileHandler {
//FIXME implement
}
/**
* Creates the String representation of the parsed tree of the root configuration file.
* Can also be used to filter out some of its nodes before printing it to a file.
*
* @see #print(org.eclipse.titan.common.parsers.LocationAST, StringBuilder)
* @param disallowedNodes the list of nodes that should be left out of the process.
* */
public StringBuilder toStringOriginal(final List<Integer> disallowedNodes){
return toStringInternal(originalASTs, disallowedNodes);
}
/**
* Creates the String representation of the parsed tree of all of the parsed files.
* Can be used to create a single configuration file instead of the hierarchy already exisiting.
* Can be used to create a single configuration file instead of the hierarchy already existing.
*
* @see #print(org.eclipse.titan.common.parsers.LocationAST, StringBuilder)
* @param disallowedNodes the list of nodes that should be left out of the process.
@@ -238,8 +226,24 @@ public final class ConfigFileHandler {
mLogFileName = analyzer.getLogFileName();
localAddress = analyzer.getLocalAddress();
}
final LocationAST rootNode = new LocationAST( analyzer.getParseTreeRoot() );
if (analyzer.getTcpPort() != null) {
tcpPort = analyzer.getTcpPort();
}
if (analyzer.getLocalAddress() != null) {
localAddress = analyzer.getLocalAddress();
}
if (analyzer.getKillTimer() != null) {
killTimer = analyzer.getKillTimer();
}
if (analyzer.getNumHcs() != null) {
numHCs = analyzer.getNumHcs();
}
if (analyzer.isUnixDomainSocketEnabled() != null) {
unixDomainSocket = analyzer.isUnixDomainSocketEnabled();
}
final LocationAST rootNode = new LocationAST( analyzer.getParseTreeRoot(), analyzer.getTokenStream() );
if ( rootNode != null ) {
originalASTs.put( actualFile, rootNode );
@@ -265,26 +269,8 @@ public final class ConfigFileHandler {
*
* @param root the tree root to start at.
*/
private void print(final LocationAST root, final StringBuilder stringbuilder) {
CommonHiddenStreamToken hidden = root.getHiddenBefore();
if(hidden != null){
while(hidden.getHiddenBefore() != null){
hidden = hidden.getHiddenBefore();
}
while(hidden != null){
stringbuilder.append(hidden.getText());
hidden = hidden.getHiddenAfter();
}
}
stringbuilder.append(root.getText());
LocationAST child = root.getFirstChild();
while(child != null){
final Integer tempType = child.getType();
if(disallowedNodes != null && !disallowedNodes.contains(tempType)){
print(child, stringbuilder);
}
child = child.getNextSibling();
}
private void print(final LocationAST aRoot, final StringBuilder aSb) {
ConfigTreeNodeUtilities.print( aRoot.getRule(), aRoot.getTokenStream(), aSb, disallowedNodes );
}
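The rewritten print() delegates to ConfigTreeNodeUtilities.print, whose body is not part of this diff. A rough, hypothetical sketch of what such a printer could look like (this is not the actual Titan implementation): walk the tree and, for each terminal, emit the hidden tokens found to its left in the stored stream before the token text itself, skipping disallowed token types.

import java.util.List;

import org.antlr.v4.runtime.BufferedTokenStream;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.TerminalNode;

public final class TreePrinterSketch {
    /** Hypothetical printer, NOT the actual ConfigTreeNodeUtilities implementation. */
    public static void print( final ParseTree aRoot, final BufferedTokenStream aTokens,
            final StringBuilder aSb, final List<Integer> aDisallowedNodes ) {
        if ( aRoot instanceof TerminalNode ) {
            final Token token = ( (TerminalNode) aRoot ).getSymbol();
            if ( aDisallowedNodes != null && aDisallowedNodes.contains( token.getType() ) ) {
                return;
            }
            // first the whitespace/comments hidden before this token
            final List<Token> hidden =
                aTokens.getHiddenTokensToLeft( token.getTokenIndex(), Token.HIDDEN_CHANNEL );
            if ( hidden != null ) {
                for ( final Token h : hidden ) {
                    aSb.append( h.getText() );
                }
            }
            // then the token itself, leaving out the synthetic EOF marker
            if ( token.getType() != Token.EOF ) {
                aSb.append( token.getText() );
            }
            return;
        }
        // non-terminal: recurse into the children in order
        for ( int i = 0; i < aRoot.getChildCount(); i++ ) {
            print( aRoot.getChild( i ), aTokens, aSb, aDisallowedNodes );
        }
    }
}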