Use of org.eclipse.titan.designer.parsers.preprocess.PreprocessorDirective in project titan.EclipsePlug-ins by Eclipse.
The method fetch of the class PreprocessedTokenStream.
/**
 * Fetches up to {@code n} tokens from the underlying (possibly stacked)
 * token sources into the {@code tokens} buffer, expanding TTCN-3
 * preprocessor directives on the fly.
 * <p>
 * Directive tokens are re-lexed and re-parsed with a dedicated
 * {@code PreprocessorDirectiveLexer}/{@code PreprocessorDirectiveParser}
 * pair. Conditional directives update {@code condStateStack} (and the
 * inactive-code location list for the top-level file); {@code #include}
 * pushes a new token source onto {@code tokenStreamStack} via
 * {@code processIncludeDirective}. Ordinary tokens are buffered only while
 * the conditional state is passing; tokens in inactive code are dropped.
 *
 * @param n the number of tokens requested by the caller
 * @return the number of tokens actually added to the buffer; {@code 0} if
 *         EOF was already fetched or the token source returned {@code null}
 */
@Override
public int fetch(int n) {
	if (fetchedEOF) {
		return 0;
	}

	int i = 0;
	do {
		// Read from the innermost included file if one is active,
		// otherwise from the primary token source.
		final Token t;
		if (tokenStreamStack.isEmpty()) {
			t = getTokenSource().nextToken();
		} else {
			t = tokenStreamStack.peek().getTokenSource().nextToken();
		}
		if (t == null) {
			return 0;
		}

		final int tokenType = t.getType();
		if (tokenType == Ttcn3Lexer.PREPROCESSOR_DIRECTIVE) {
			lastPPDirectiveLocation = new Location(actualFile, t.getLine(), t.getStartIndex(), t.getStopIndex() + 1);

			// 1. the first # shall be discarded
			// 2. "\\\n" strings are removed, so multiline tokens, which are split by backslash, are extracted to one line
			final String text = t.getText().substring(1).replace("\\\n", "");
			final Reader reader = new StringReader(text);
			final CharStream charStream = new UnbufferedCharStream(reader);
			final PreprocessorDirectiveLexer lexer = new PreprocessorDirectiveLexer(charStream);
			lexer.setTokenFactory(new PPDirectiveTokenFactory(true, t));
			lexerListener = new PPListener();
			lexer.removeErrorListeners();
			lexer.addErrorListener(lexerListener);
			// Keep the directive's original position so reported errors point
			// into the real source file, not into the extracted directive text.
			lexer.setLine(t.getLine());
			lexer.setCharPositionInLine(t.getCharPositionInLine());

			// 1. Previously it was UnbufferedTokenStream(lexer), but it was changed to BufferedTokenStream,
			//    because UnbufferedTokenStream seems to be unusable. It is an ANTLR 4 bug.
			//    Read this: https://groups.google.com/forum/#!topic/antlr-discussion/gsAu-6d3pKU
			//    pr_PatternChunk[StringBuilder builder, boolean[] uni]:
			//    $builder.append($v.text); <-- exception is thrown here:
			//    java.lang.UnsupportedOperationException: interval 85..85 not in token buffer window: 86..341
			// 2. Changed from BufferedTokenStream to CommonTokenStream, otherwise tokens with
			//    "-> channel(HIDDEN)" are not filtered out in lexer.
			final CommonTokenStream tokenStream = new CommonTokenStream(lexer);
			final PreprocessorDirectiveParser localParser = new PreprocessorDirectiveParser(tokenStream);
			localParser.setBuildParseTree(false);
			parserListener = new PPListener(localParser);
			localParser.removeErrorListeners();
			localParser.addErrorListener(parserListener);
			localParser.setIsActiveCode(condStateStack.isPassing());
			localParser.setMacros(macros);
			localParser.setLine(t.getLine());

			// Note: the dead "= null" initialization was removed; the value is
			// assigned directly from the parse result.
			final PreprocessorDirective ppDirective = localParser.pr_Directive().ppDirective;
			errorsStored.addAll(localParser.getErrorStorage());
			warnings.addAll(localParser.getWarnings());
			unsupportedConstructs.addAll(localParser.getUnsupportedConstructs());

			if (ppDirective != null) {
				ppDirective.line = t.getLine();
				if (ppDirective.isConditional()) {
					final boolean preIsPassing = condStateStack.isPassing();
					condStateStack.processDirective(ppDirective);
					final boolean postIsPassing = condStateStack.isPassing();
					if (preIsPassing != postIsPassing && tokenStreamStack.isEmpty()
							&& getTokenSource() instanceof Ttcn3Lexer) {
						// included files are ignored because of ambiguity
						final Location ppLocation = lastPPDirectiveLocation;
						if (ppLocation != null) {
							if (preIsPassing) {
								// switched to inactive: begin a new inactive location
								final Location loc = new Location(actualFile, ppLocation.getLine(),
										ppLocation.getEndOffset(), ppLocation.getEndOffset());
								inactiveCodeLocations.add(loc);
							} else {
								// switched to active: end the current inactive location
								final int iclSize = inactiveCodeLocations.size();
								if (iclSize > 0) {
									final Location lastLocation = inactiveCodeLocations.get(iclSize - 1);
									lastLocation.setEndOffset(ppLocation.getOffset());
								}
							}
						}
					}
				} else if (condStateStack.isPassing()) {
					// Non-conditional directives are only acted upon inside active code.
					switch (ppDirective.type) {
					case INCLUDE: {
						if (tokenStreamStack.size() > RECURSION_LIMIT) {
							// dumb but safe defense against infinite recursion, default value from gcc
							final TITANMarker marker = new TITANMarker("Maximum #include recursion depth reached",
									ppDirective.line, -1, -1, IMarker.SEVERITY_ERROR, IMarker.PRIORITY_NORMAL);
							unsupportedConstructs.add(marker);
						} else {
							// TODO: Makes the Eclipse slow down
							processIncludeDirective(ppDirective);
						}
					}
						break;
					case ERROR: {
						final String errorMessage = ppDirective.str == null ? "" : ppDirective.str;
						final TITANMarker marker = new TITANMarker(errorMessage, ppDirective.line, -1, -1,
								IMarker.SEVERITY_ERROR, IMarker.PRIORITY_NORMAL);
						unsupportedConstructs.add(marker);
					}
						break;
					case WARNING: {
						final String warningMessage = ppDirective.str == null ? "" : ppDirective.str;
						final TITANMarker marker = new TITANMarker(warningMessage, ppDirective.line, -1, -1,
								IMarker.SEVERITY_WARNING, IMarker.PRIORITY_NORMAL);
						warnings.add(marker);
					}
						break;
					case LINECONTROL:
					case LINEMARKER:
					case PRAGMA:
					case NULL: {
						// These directives are not supported; report them according
						// to the user's preference (error / warning / ignore).
						final String reportPreference = Platform.getPreferencesService().getString(
								ProductConstants.PRODUCT_ID_DESIGNER,
								PreferenceConstants.REPORT_IGNORED_PREPROCESSOR_DIRECTIVES,
								GeneralConstants.WARNING, null);
						if (!GeneralConstants.IGNORE.equals(reportPreference)) {
							final boolean isError = GeneralConstants.ERROR.equals(reportPreference);
							final TITANMarker marker = new TITANMarker(
									MessageFormat.format("Preprocessor directive {0} is ignored", ppDirective.type.getName()),
									ppDirective.line, -1, -1,
									isError ? IMarker.SEVERITY_ERROR : IMarker.SEVERITY_WARNING,
									IMarker.PRIORITY_NORMAL);
							if (isError) {
								unsupportedConstructs.add(marker);
							} else {
								warnings.add(marker);
							}
						}
					}
						break;
					default:
						// No action for any other directive type.
						break;
					}
				}
			}
		} else if (tokenType == Token.EOF) {
			if (!tokenStreamStack.isEmpty()) {
				// the included file ended: drop its lexer from the stack and ignore the EOF token
				final TokenStreamData tsd = tokenStreamStack.pop();
				if (parser != null) {
					if (tokenStreamStack.isEmpty()) {
						parser.setActualFile(actualFile);
					} else {
						parser.setActualFile(tokenStreamStack.peek().file);
					}
				}
				if (tsd.reader != null) {
					try {
						tsd.reader.close();
					} catch (IOException ignored) {
						// Best-effort close of the included file's reader; there is
						// no meaningful way to report or recover from this failure.
					}
				}
			} else {
				// real EOF of the top-level file: emit it and stop fetching
				fetchedEOF = true;
				condStateStack.eofCheck();
				tokens.add(t);
				((CommonToken) t).setTokenIndex(tokens.size() - 1);
				--n;
				++i;
				if (n == 0) {
					return i;
				}
			}
		} else if (condStateStack.isPassing()) {
			// ordinary token in active code: buffer it
			tokens.add(t);
			((CommonToken) t).setTokenIndex(tokens.size() - 1);
			--n;
			++i;
			if (n == 0) {
				return i;
			}
		}
		// tokens inside inactive (#if'd-out) regions are silently dropped
	} while (true);
}
Aggregations