Commit dbb6fefb authored by Kristof Szabados

could be final.

Signed-off-by: Kristof Szabados <Kristof.Szabados@ericsson.com>
parent 3afb85ab
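
The change below is mechanical: local variables and loop variables that are assigned exactly once are declared final throughout the parser and preprocessor classes. A minimal sketch of the pattern (illustrative only, not taken from this commit; the class and variable names are hypothetical):

import java.text.MessageFormat;

public final class FinalLocalSketch {
	public static void main(final String[] args) {
		// before: String message = MessageFormat.format(...);
		// after:  the local is assigned exactly once, so it can be declared final
		final String message = MessageFormat.format("Directive {0} is ignored", "#pragma");
		System.out.println(message);
		// message = "reassigned"; // no longer compiles once the local is final
	}
}

Marking single-assignment locals final documents intent and lets the compiler reject accidental reassignment; it does not change runtime behaviour.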
@@ -41,7 +41,7 @@ public class PPDirectiveTokenFactory implements TokenFactory<CommonToken>{
@Override
public CommonToken create(final Pair<TokenSource, CharStream> source, final int type,
final String text, final int channel, final int start, final int stop, final int line, final int charPositionInLine) {
CommonToken t = new CommonToken(source, type, channel, start, stop);
final CommonToken t = new CommonToken(source, type, channel, start, stop);
t.setLine(line);
t.setStartIndex(start + token.getStartIndex());
t.setStopIndex(stop + token.getStartIndex() + 1);
@@ -172,10 +172,11 @@ class ConditionalStateMachine {
ErrorReporter.INTERNAL_ERROR();
return;
}
ConditionalState newState = state.transition(transition);
final ConditionalState newState = state.transition(transition);
if (newState == null) {
// invalid transition was requested
TITANMarker marker = new TITANMarker(MessageFormat.format("Directive {0} after {1} is not a valid preprocessor conditional",
final TITANMarker marker = new TITANMarker(MessageFormat.format("Directive {0} after {1} is not a valid preprocessor conditional",
ppDirective.type.getName(), state.getName()), ppDirective.line, -1, -1, IMarker.SEVERITY_ERROR,
IMarker.PRIORITY_NORMAL);
errors.add(marker);
@@ -224,7 +225,7 @@ class ConditionalStateStack {
case IF:
case IFDEF:
case IFNDEF: {
ConditionalStateMachine csm = new ConditionalStateMachine(ppDirective);
final ConditionalStateMachine csm = new ConditionalStateMachine(ppDirective);
stateStack.add(csm);
}
break;
@@ -232,12 +233,12 @@ class ConditionalStateStack {
case ELSE:
case ENDIF: {
if (stateStack.isEmpty()) {
TITANMarker marker = new TITANMarker(MessageFormat.format(
final TITANMarker marker = new TITANMarker(MessageFormat.format(
"Directive {0} without corresponding #if/#ifdef/#ifndef directive", ppDirective.type.getName()),
ppDirective.line, -1, -1, IMarker.SEVERITY_ERROR, IMarker.PRIORITY_NORMAL);
unsupportedConstructs.add(marker);
} else {
ConditionalStateMachine topState = stateStack.peek();
final ConditionalStateMachine topState = stateStack.peek();
topState.transition(ppDirective, unsupportedConstructs);
if (topState.hasEnded()) {
stateStack.pop();
@@ -257,7 +258,7 @@ class ConditionalStateStack {
* @return true to not filter
*/
public boolean isPassing() {
for (ConditionalStateMachine csm : stateStack) {
for (final ConditionalStateMachine csm : stateStack) {
if (!csm.isPassing()) {
return false;
}
@@ -270,8 +271,8 @@ class ConditionalStateStack {
* error marker(s).
*/
public void eofCheck() {
for (ConditionalStateMachine csm : stateStack) {
TITANMarker marker = new TITANMarker(MessageFormat.format("{0} directive was not terminated",
for (final ConditionalStateMachine csm : stateStack) {
final TITANMarker marker = new TITANMarker(MessageFormat.format("{0} directive was not terminated",
csm.beginDirective.type.getName()), csm.beginDirective.line, -1, -1, IMarker.SEVERITY_ERROR,
IMarker.PRIORITY_NORMAL);
unsupportedConstructs.add(marker);
@@ -363,7 +364,7 @@ public class PreprocessedTokenStream extends CommonTokenStream {
}
public void setMacros(final String[] definedList) {
for (String s : definedList) {
for (final String s : definedList) {
macros.put(s, "");
}
}
@@ -377,30 +378,31 @@ public class PreprocessedTokenStream extends CommonTokenStream {
*/
private void processIncludeDirective(final PreprocessorDirective ppDirective) {
if (ppDirective.str == null || "".equals(ppDirective.str)) {
TITANMarker marker = new TITANMarker("File name was not provided", ppDirective.line, -1, -1, IMarker.SEVERITY_ERROR,
final TITANMarker marker = new TITANMarker("File name was not provided", ppDirective.line, -1, -1, IMarker.SEVERITY_ERROR,
IMarker.PRIORITY_NORMAL);
unsupportedConstructs.add(marker);
return;
}
IFile includedFile = GlobalParser.getProjectSourceParser(actualFile.getProject()).getTTCN3IncludeFileByName(ppDirective.str);
final IFile includedFile = GlobalParser.getProjectSourceParser(actualFile.getProject()).getTTCN3IncludeFileByName(ppDirective.str);
if (includedFile == null) {
TITANMarker marker = new TITANMarker(MessageFormat.format("Included file `{0}'' could not be found", ppDirective.str),
final TITANMarker marker = new TITANMarker(MessageFormat.format("Included file `{0}'' could not be found", ppDirective.str),
ppDirective.line, -1, -1, IMarker.SEVERITY_ERROR, IMarker.PRIORITY_NORMAL);
unsupportedConstructs.add(marker);
return;
}
// check extension
if (!GlobalParser.TTCNIN_EXTENSION.equals(includedFile.getFileExtension())) {
TITANMarker marker = new TITANMarker(MessageFormat.format("File `{0}'' does not have the `{1}'' extension", ppDirective.str,
final TITANMarker marker = new TITANMarker(MessageFormat.format("File `{0}'' does not have the `{1}'' extension", ppDirective.str,
GlobalParser.TTCNIN_EXTENSION), ppDirective.line, -1, -1, IMarker.SEVERITY_WARNING, IMarker.PRIORITY_NORMAL);
warnings.add(marker);
}
// check if the file is already loaded into an editor
String code = null;
if (EditorTracker.containsKey(includedFile)) {
List<ISemanticTITANEditor> editors = EditorTracker.getEditor(includedFile);
ISemanticTITANEditor editor = editors.get(0);
IDocument document = editor.getDocument();
final List<ISemanticTITANEditor> editors = EditorTracker.getEditor(includedFile);
final ISemanticTITANEditor editor = editors.get(0);
final IDocument document = editor.getDocument();
code = document.get();
}
// create lexer and set it up
@@ -448,7 +450,8 @@ public class PreprocessedTokenStream extends CommonTokenStream {
ErrorReporter.logExceptionStackTrace(e);
return;
}
IFileInfo fileInfo = store.fetchInfo();
final IFileInfo fileInfo = store.fetchInfo();
rootInt = (int) fileInfo.getLength();
}
lexer.setTokenFactory(new CommonTokenFactory(true));
@@ -479,15 +482,15 @@ public class PreprocessedTokenStream extends CommonTokenStream {
if (t == null) {
return 0;
}
int tokenType = t.getType();
final int tokenType = t.getType();
if (tokenType == Ttcn3Lexer.PREPROCESSOR_DIRECTIVE) {
lastPPDirectiveLocation = new Location(actualFile, t.getLine(), t.getStartIndex(), t.getStopIndex() + 1);
// 1. the first # shall be discarded
// 2. "\\\n" sequences are removed, so multi-line tokens that are split by a backslash are merged into one line
final String text = t.getText().substring(1).replace("\\\n", "");
Reader reader = new StringReader( text );
CharStream charStream = new UnbufferedCharStream(reader);
PreprocessorDirectiveLexer lexer = new PreprocessorDirectiveLexer(charStream);
final Reader reader = new StringReader( text );
final CharStream charStream = new UnbufferedCharStream(reader);
final PreprocessorDirectiveLexer lexer = new PreprocessorDirectiveLexer(charStream);
lexer.setTokenFactory(new PPDirectiveTokenFactory(true, t));
lexerListener = new PPListener();
lexer.removeErrorListeners();
@@ -502,7 +505,7 @@ public class PreprocessedTokenStream extends CommonTokenStream {
// 2. Changed from BufferedTokenStream to CommonTokenStream, otherwise tokens with "-> channel(HIDDEN)" are not filtered out in lexer.
final CommonTokenStream tokenStream = new CommonTokenStream( lexer );
PreprocessorDirectiveParser localParser = new PreprocessorDirectiveParser( tokenStream );
final PreprocessorDirectiveParser localParser = new PreprocessorDirectiveParser( tokenStream );
localParser.setBuildParseTree(false);
parserListener = new PPListener(localParser);
localParser.removeErrorListeners();
@@ -518,23 +521,23 @@ public class PreprocessedTokenStream extends CommonTokenStream {
if (ppDirective != null) {
ppDirective.line = t.getLine();
if (ppDirective.isConditional()) {
boolean preIsPassing = condStateStack.isPassing();
final boolean preIsPassing = condStateStack.isPassing();
condStateStack.processDirective(ppDirective);
boolean postIsPassing = condStateStack.isPassing();
final boolean postIsPassing = condStateStack.isPassing();
if (preIsPassing != postIsPassing && tokenStreamStack.isEmpty() && getTokenSource() instanceof Ttcn3Lexer) {
// included files are ignored because of ambiguity
Location ppLocation = lastPPDirectiveLocation;
final Location ppLocation = lastPPDirectiveLocation;
if (ppLocation != null) {
if (preIsPassing) {
// switched to inactive: begin a new inactive location
Location loc = new Location(actualFile, ppLocation.getLine(),
final Location loc = new Location(actualFile, ppLocation.getLine(),
ppLocation.getEndOffset(), ppLocation.getEndOffset());
inactiveCodeLocations.add(loc);
} else {
// switched to active: end the current inactive location
int iclSize = inactiveCodeLocations.size();
final int iclSize = inactiveCodeLocations.size();
if (iclSize > 0) {
Location lastLocation = inactiveCodeLocations.get(iclSize - 1);
final Location lastLocation = inactiveCodeLocations.get(iclSize - 1);
lastLocation.setEndOffset(ppLocation.getOffset());
}
}
@@ -549,7 +552,7 @@ public class PreprocessedTokenStream extends CommonTokenStream {
case INCLUDE: {
if (tokenStreamStack.size() > RECURSION_LIMIT) {
// dumb but safe defense against infinite recursion, default value from gcc
TITANMarker marker = new TITANMarker(
final TITANMarker marker = new TITANMarker(
"Maximum #include recursion depth reached", ppDirective.line,
-1, -1, IMarker.SEVERITY_ERROR, IMarker.PRIORITY_NORMAL);
unsupportedConstructs.add(marker);
@@ -559,15 +562,15 @@ public class PreprocessedTokenStream extends CommonTokenStream {
}
break;
case ERROR: {
String errorMessage = ppDirective.str == null ? "" : ppDirective.str;
TITANMarker marker = new TITANMarker(errorMessage, ppDirective.line, -1, -1,
final String errorMessage = ppDirective.str == null ? "" : ppDirective.str;
final TITANMarker marker = new TITANMarker(errorMessage, ppDirective.line, -1, -1,
IMarker.SEVERITY_ERROR, IMarker.PRIORITY_NORMAL);
unsupportedConstructs.add(marker);
}
break;
case WARNING: {
String warningMessage = ppDirective.str == null ? "" : ppDirective.str;
TITANMarker marker = new TITANMarker(warningMessage, ppDirective.line, -1, -1,
final String warningMessage = ppDirective.str == null ? "" : ppDirective.str;
final TITANMarker marker = new TITANMarker(warningMessage, ppDirective.line, -1, -1,
IMarker.SEVERITY_WARNING, IMarker.PRIORITY_NORMAL);
warnings.add(marker);
}
@@ -576,13 +579,13 @@ public class PreprocessedTokenStream extends CommonTokenStream {
case LINEMARKER:
case PRAGMA:
case NULL: {
String reportPreference = Platform.getPreferencesService().getString(
final String reportPreference = Platform.getPreferencesService().getString(
ProductConstants.PRODUCT_ID_DESIGNER,
PreferenceConstants.REPORT_IGNORED_PREPROCESSOR_DIRECTIVES,
GeneralConstants.WARNING, null);
if (!GeneralConstants.IGNORE.equals(reportPreference)) {
boolean isError = GeneralConstants.ERROR.equals(reportPreference);
TITANMarker marker = new TITANMarker(MessageFormat.format(
final boolean isError = GeneralConstants.ERROR.equals(reportPreference);
final TITANMarker marker = new TITANMarker(MessageFormat.format(
"Preprocessor directive {0} is ignored",
ppDirective.type.getName()), ppDirective.line, -1, -1,
isError ? IMarker.SEVERITY_ERROR : IMarker.SEVERITY_WARNING,
@@ -605,7 +608,7 @@ public class PreprocessedTokenStream extends CommonTokenStream {
if (!tokenStreamStack.isEmpty()) {
// the included file ended, drop lexer
// from the stack and ignore EOF token
TokenStreamData tsd = tokenStreamStack.pop();
final TokenStreamData tsd = tokenStreamStack.pop();
if (parser != null) {
if (tokenStreamStack.isEmpty()) {
parser.setActualFile(actualFile);
@@ -25,7 +25,7 @@ public final class ReparseUtilities {
* @return all token types that are valid for reparsing (EOF is not)
* */
public static List<Integer> getAllValidTokenTypes() {
List<Integer> result = new ArrayList<Integer>();
final List<Integer> result = new ArrayList<Integer>();
for (int i = Ttcn3Lexer.EOF + 1; i < Ttcn3Lexer.LEXERPLACEHOLDER; i++) {
result.add(Integer.valueOf(i));
@@ -130,7 +130,8 @@ public class TTCN3Analyzer implements ISourceAnalyzer {
ErrorReporter.logExceptionStackTrace(e);
return;
}
IFileInfo fileInfo = store.fetchInfo();
final IFileInfo fileInfo = store.fetchInfo();
rootInt = (int) fileInfo.getLength();
} else {
return;
@@ -166,11 +167,10 @@ public class TTCN3Analyzer implements ISourceAnalyzer {
*/
private void parse( final Reader aReader, final int aFileLength, final IFile aEclipseFile ) {
final IPreferencesService prefs = Platform.getPreferencesService();
boolean realtimeEnabled = prefs.getBoolean(ProductConstants.PRODUCT_ID_DESIGNER, PreferenceConstants.ENABLEREALTIMEEXTENSION, false, null);
CharStream charStream = new UnbufferedCharStream( aReader );
Ttcn3Lexer lexer = new Ttcn3Lexer( charStream );
final boolean realtimeEnabled = prefs.getBoolean(ProductConstants.PRODUCT_ID_DESIGNER, PreferenceConstants.ENABLEREALTIMEEXTENSION, false, null);
final CharStream charStream = new UnbufferedCharStream( aReader );
final Ttcn3Lexer lexer = new Ttcn3Lexer( charStream );
lexer.setCommentTodo( true );
lexer.setTokenFactory( new CommonTokenFactory( true ) );
lexer.initRootInterval( aFileLength );
@@ -178,7 +178,7 @@ public class TTCN3Analyzer implements ISourceAnalyzer {
lexer.enableRealtime();
}
TitanListener lexerListener = new TitanListener();
final TitanListener lexerListener = new TitanListener();
// remove ConsoleErrorListener
lexer.removeErrorListeners();
lexer.addErrorListener(lexerListener);
@@ -215,13 +215,13 @@ public class TTCN3Analyzer implements ISourceAnalyzer {
// remove ConsoleErrorListener
parser.removeErrorListeners();
TitanListener parserListener = new TitanListener();
final TitanListener parserListener = new TitanListener();
parser.addErrorListener( parserListener );
// This is added because of the following ANTLR 4 bug:
// Memory Leak in PredictionContextCache #499
// https://github.com/antlr/antlr4/issues/499
DFA[] decisionToDFA = parser.getInterpreter().decisionToDFA;
final DFA[] decisionToDFA = parser.getInterpreter().decisionToDFA;
parser.setInterpreter(new ParserATNSimulator(parser, parser.getATN(), decisionToDFA, new PredictionContextCache()));
//try SLL mode
@@ -239,7 +239,7 @@ public class TTCN3Analyzer implements ISourceAnalyzer {
if (!warningsAndErrors.isEmpty() || !mErrorsStored.isEmpty()) {
//SLL mode might have failed, try LL mode
try {
CharStream charStream2 = new UnbufferedCharStream( aReader );
final CharStream charStream2 = new UnbufferedCharStream( aReader );
lexer.setInputStream(charStream2);
//lexer.reset();
parser.reset();
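For context, the retry seen above ("try SLL mode", then parsing again when errors were recorded) follows ANTLR 4's usual two-stage parsing strategy. A minimal sketch of the canonical form, assuming the Ttcn3Parser entry rule pr_TTCN3File() used elsewhere in this commit; note the code above retries based on its error listeners rather than a BailErrorStrategy:

import org.antlr.v4.runtime.BailErrorStrategy;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.DefaultErrorStrategy;
import org.antlr.v4.runtime.atn.PredictionMode;
import org.antlr.v4.runtime.misc.ParseCancellationException;
import org.antlr.v4.runtime.tree.ParseTree;

final class TwoStageParseSketch {
	static ParseTree parse(final Ttcn3Parser parser, final CommonTokenStream tokenStream) {
		// fast path: SLL prediction with a bail-out error strategy
		parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
		parser.setErrorHandler(new BailErrorStrategy());
		try {
			return parser.pr_TTCN3File();
		} catch (final ParseCancellationException e) {
			// slow path: rewind the tokens and reparse with full LL prediction
			tokenStream.seek(0);
			parser.reset();
			parser.setErrorHandler(new DefaultErrorStrategy());
			parser.getInterpreter().setPredictionMode(PredictionMode.LL);
			return parser.pr_TTCN3File();
		}
	}
}
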
@@ -34,16 +34,16 @@ public final class TTCN3ReferenceAnalyzer {
public Reference parse(final IFile file, final String code, final boolean reportErrors, final int aLine, final int aOffset) {
Reference reference = null;
Reader reader = new StringReader( code );
CharStream charStream = new UnbufferedCharStream( reader );
Ttcn3Lexer lexer = new Ttcn3Lexer( charStream );
final Reader reader = new StringReader( code );
final CharStream charStream = new UnbufferedCharStream( reader );
final Ttcn3Lexer lexer = new Ttcn3Lexer( charStream );
lexer.setTokenFactory( new CommonTokenFactory( true ) );
lexer.initRootInterval( code.length() );
lexer.removeErrorListeners();
final CommonTokenStream tokenStream = new CommonTokenStream( lexer );
Ttcn3Parser parser = new Ttcn3Parser( tokenStream );
final Ttcn3Parser parser = new Ttcn3Parser( tokenStream );
ParserUtilities.setBuildParseTree( parser );
lexer.setActualFile(file);
@@ -71,16 +71,16 @@ public final class TTCN3ReferenceAnalyzer {
public Reference parseForCompletion(final IFile file, final String code) {
Reference reference = null;
Reader reader = new StringReader( code );
CharStream charStream = new UnbufferedCharStream( reader );
Ttcn3KeywordlessLexer lexer = new Ttcn3KeywordlessLexer( charStream );
final Reader reader = new StringReader( code );
final CharStream charStream = new UnbufferedCharStream( reader );
final Ttcn3KeywordlessLexer lexer = new Ttcn3KeywordlessLexer( charStream );
lexer.setTokenFactory( new CommonTokenFactory( true ) );
lexer.initRootInterval( code.length() );
lexer.removeErrorListeners();
final CommonTokenStream tokenStream = new CommonTokenStream( lexer );
Ttcn3Parser parser = new Ttcn3Parser( tokenStream );
final Ttcn3Parser parser = new Ttcn3Parser( tokenStream );
ParserUtilities.setBuildParseTree( parser );
lexer.setActualFile(file);
@@ -147,7 +147,7 @@ public final class TTCN3ReparseUpdater {
if (offset > modificationStartOffset) {
location.setEndOffset(Math.max(offset + shift, modificationStartOffset));
}
int line = location.getLine();
final int line = location.getLine();
if (line > firstLine) {
location.setLine(line + lineShift);
} else if (line == firstLine && offset > modificationStartOffset) {
@@ -172,23 +172,24 @@ public final class TTCN3ReparseUpdater {
return false;
}
int line = getLineOfOffset(code, modificationStartOffset);
int column = getPositionInLine(code, modificationStartOffset);
final int line = getLineOfOffset(code, modificationStartOffset);
final int column = getPositionInLine(code, modificationStartOffset);
String substring;
if (code.length() <= modificationEndOffset + shift) {
substring = code.substring(modificationStartOffset);
} else {
substring = code.substring(modificationStartOffset, modificationEndOffset + shift);
}
Reader reader = new StringReader(substring);
CharStream charStream = new UnbufferedCharStream(reader);
Ttcn3Lexer lexer = new Ttcn3Lexer(charStream);
final Reader reader = new StringReader(substring);
final CharStream charStream = new UnbufferedCharStream(reader);
final Ttcn3Lexer lexer = new Ttcn3Lexer(charStream);
lexer.setTokenFactory( new CommonTokenFactory( true ) );
lexer.setLine( line + 1 );
lexer.setCharPositionInLine(column);
lexer.initRootInterval(modificationEndOffset - modificationStartOffset + 1);
Token token = lexer.nextToken();
final Token token = lexer.nextToken();
if (token == null) {
return false;
}
@@ -213,23 +214,24 @@ public final class TTCN3ReparseUpdater {
return false;
}
int line = getLineOfOffset(code, modificationStartOffset);
int column = getPositionInLine(code, modificationStartOffset);
final int line = getLineOfOffset(code, modificationStartOffset);
final int column = getPositionInLine(code, modificationStartOffset);
String substring;
if (code.length() <= modificationEndOffset + shift) {
substring = code.substring(modificationStartOffset);
} else {
substring = code.substring(modificationStartOffset, modificationEndOffset + shift);
}
Reader reader = new StringReader(substring);
CharStream charStream = new UnbufferedCharStream(reader);
Ttcn3Lexer lexer = new Ttcn3Lexer(charStream);
final Reader reader = new StringReader(substring);
final CharStream charStream = new UnbufferedCharStream(reader);
final Ttcn3Lexer lexer = new Ttcn3Lexer(charStream);
lexer.setTokenFactory( new CommonTokenFactory( true ) );
lexer.setLine( line + 1 );
lexer.setCharPositionInLine(column);
lexer.initRootInterval(modificationEndOffset - modificationStartOffset + 1);
Token token = lexer.nextToken();
final Token token = lexer.nextToken();
if (token == null) {
return false;
}
@@ -279,12 +281,13 @@ public final class TTCN3ReparseUpdater {
} else {
substring = code.substring(modificationStartOffset, modificationEndOffset + shift);
}
int rangeEnd = substring.length();
final int rangeEnd = substring.length();
int nextPos = 0;
boolean insideString = false;
boolean insideSingleComment = false;
boolean insideMultiComment = false;
Stack<String> elements = new Stack<String>();
final Stack<String> elements = new Stack<String>();
int unclosedStarting = 0;
int unclosedEnding = 0;
try {
@@ -407,7 +410,7 @@ public final class TTCN3ReparseUpdater {
*/
private void reportSpecificSyntaxErrors() {
if (mErrors != null) {
Location temp = new Location(file, firstLine, modificationStartOffset, modificationEndOffset + shift);
final Location temp = new Location(file, firstLine, modificationStartOffset, modificationEndOffset + shift);
for (int i = 0; i < mErrors.size(); i++) {
ParserMarkerSupport.createOnTheFlySyntacticMarker(file, mErrors.get(i), IMarker.SEVERITY_ERROR, temp);
}
@@ -417,17 +420,17 @@ public final class TTCN3ReparseUpdater {
public final void reportSyntaxErrors() {
reportSpecificSyntaxErrors();
if (warningsAndErrors != null) {
for (TITANMarker marker : warningsAndErrors) {
for (final TITANMarker marker : warningsAndErrors) {
if (file.isAccessible()) {
Location location = new Location(file, marker.getLine(), marker.getOffset(), marker.getEndOffset());
final Location location = new Location(file, marker.getLine(), marker.getOffset(), marker.getEndOffset());
location.reportExternalProblem(marker.getMessage(), marker.getSeverity(), GeneralConstants.ONTHEFLY_SYNTACTIC_MARKER);
}
}
}
if (unsupportedConstructs != null && !unsupportedConstructs.isEmpty()) {
Iterator<TITANMarker> iterator = unsupportedConstructs.iterator();
final Iterator<TITANMarker> iterator = unsupportedConstructs.iterator();
while (iterator.hasNext()) {
TITANMarker marker = iterator.next();
final TITANMarker marker = iterator.next();
if (marker.getOffset() >= modificationEndOffset) {
marker.setOffset(marker.getOffset() + shift);
marker.setEndOffset(marker.getEndOffset() + shift);
@@ -478,9 +481,9 @@ public final class TTCN3ReparseUpdater {
// double wideparsing = System.nanoTime();
mErrors = null;
warningsAndErrors = null;
Iterator<TITANMarker> iterator = unsupportedConstructs.iterator();
final Iterator<TITANMarker> iterator = unsupportedConstructs.iterator();
while (iterator.hasNext()) {
TITANMarker marker = iterator.next();
final TITANMarker marker = iterator.next();
if ((marker.getOffset() > modificationStartOffset && marker.getOffset() <= modificationEndOffset)
|| (marker.getEndOffset() > modificationStartOffset && marker.getEndOffset() <= modificationEndOffset)) {
iterator.remove();
@@ -493,21 +496,22 @@ public final class TTCN3ReparseUpdater {
return Integer.MAX_VALUE;
}
int line = getLineOfOffset(code, modificationStartOffset);
final int line = getLineOfOffset(code, modificationStartOffset);
String substring;
if (code.length() <= modificationEndOffset + shift) {
substring = code.substring(modificationStartOffset);
} else {
substring = code.substring(modificationStartOffset, modificationEndOffset + shift);
}
Reader reader = new StringReader(substring);
CharStream charStream = new UnbufferedCharStream(reader);
Ttcn3Lexer lexer = new Ttcn3Lexer(charStream);
final Reader reader = new StringReader(substring);
final CharStream charStream = new UnbufferedCharStream(reader);
final Ttcn3Lexer lexer = new Ttcn3Lexer(charStream);
lexer.setTokenFactory( new CommonTokenFactory( true ) );
lexer.initRootInterval(modificationEndOffset - modificationStartOffset + 1);
// lexer and parser listener
TitanListener parserListener = new TitanListener();
final TitanListener parserListener = new TitanListener();
// remove ConsoleErrorListener
lexer.removeErrorListeners();
lexer.addErrorListener(parserListener);
@@ -519,7 +523,7 @@ public final class TTCN3ReparseUpdater {
// 2. Changed from BufferedTokenStream to CommonTokenStream, otherwise tokens with "-> channel(HIDDEN)" are not filtered out in lexer.
final CommonTokenStream tokenStream = new CommonTokenStream( lexer );
Ttcn3Reparser parser = new Ttcn3Reparser( tokenStream );
final Ttcn3Reparser parser = new Ttcn3Reparser( tokenStream );
ParserUtilities.setBuildParseTree( parser );
lexer.setActualFile(file);
@@ -53,7 +53,7 @@ public class Ttcn3FileReparser implements ITtcn3FileReparser {
public void reparse(final Ttcn3Reparser parser) {
final ParseTree root = parser.pr_TTCN3File();
ParserUtilities.logParseTree( root, parser );
TTCN3Module actualTtcn3Module = parser.getModule();
final TTCN3Module actualTtcn3Module = parser.getModule();
if (actualTtcn3Module != null && actualTtcn3Module.getIdentifier() != null) {
mSourceParser.getSemanticAnalyzer().addModule(actualTtcn3Module);
mFileMap.put(mFile, actualTtcn3Module.getName());