Use of org.eclipse.titan.designer.parsers.ttcn3parser.Ttcn3Reparser.Pr_ErroneousAttributeSpecContext in project titan.EclipsePlug-ins by eclipse.
Example: the parseErrAttrSpecString method of the Definition class.
private static ErroneousAttributeSpecification parseErrAttrSpecString(final AttributeSpecification aAttrSpec) {
	String code = aAttrSpec.getSpecification();
	if (code == null) {
		return null;
	}

	final Location location = aAttrSpec.getLocation();

	// code must be transformed, according to compiler2/ttcn3/charstring_la.l
	// TODO
	code = Ttcn3CharstringLexer.parseCharstringValue(code, location);

	final Reader reader = new StringReader(code);
	final CharStream charStream = new UnbufferedCharStream(reader);
	final Ttcn3Lexer lexer = new Ttcn3Lexer(charStream);
	lexer.setTokenFactory(new CommonTokenFactory(true));
	// needs to be shifted by one because of the \" of the string
	lexer.setCharPositionInLine(0);

	// lexer and parser listener
	final TitanListener parserListener = new TitanListener();
	// remove ConsoleErrorListener
	lexer.removeErrorListeners();
	lexer.addErrorListener(parserListener);

	// 1. Previously it was UnbufferedTokenStream(lexer), but it was changed to BufferedTokenStream,
	//    because UnbufferedTokenStream seems to be unusable. It is an ANTLR 4 bug.
	//    Read this: https://groups.google.com/forum/#!topic/antlr-discussion/gsAu-6d3pKU
	//    pr_PatternChunk[StringBuilder builder, boolean[] uni]:
	//      $builder.append($v.text); <-- exception is thrown here:
	//      java.lang.UnsupportedOperationException: interval 85..85 not in token buffer window: 86..341
	// 2. Changed from BufferedTokenStream to CommonTokenStream, otherwise tokens
	//    with "-> channel(HIDDEN)" are not filtered out in lexer.
	final CommonTokenStream tokenStream = new CommonTokenStream(lexer);

	final Ttcn3Reparser parser = new Ttcn3Reparser(tokenStream);
	ParserUtilities.setBuildParseTree(parser);

	final IFile file = (IFile) location.getFile();
	parser.setActualFile(file);
	parser.setOffset(location.getOffset());
	parser.setLine(location.getLine());

	// remove ConsoleErrorListener
	parser.removeErrorListeners();
	parser.addErrorListener(parserListener);

	MarkerHandler.markMarkersForRemoval(GeneralConstants.ONTHEFLY_SYNTACTIC_MARKER, location.getFile(), location.getOffset(), location.getEndOffset());

	final Pr_ErroneousAttributeSpecContext root = parser.pr_ErroneousAttributeSpec();
	ParserUtilities.logParseTree(root, parser);
	final ErroneousAttributeSpecification returnValue = root.errAttrSpec;

	final List<SyntacticErrorStorage> errors = parser.getErrors();
	final List<TITANMarker> warnings = parser.getWarnings();
	final List<TITANMarker> unsupportedConstructs = parser.getUnsupportedConstructs();

	// add markers
	if (errors != null) {
		for (int i = 0; i < errors.size(); i++) {
			final Location temp = new Location(location);
			temp.setOffset(temp.getOffset());
			ParserMarkerSupport.createOnTheFlySyntacticMarker(file, errors.get(i), IMarker.SEVERITY_ERROR, temp);
		}
	}
	if (warnings != null) {
		for (final TITANMarker marker : warnings) {
			if (file.isAccessible()) {
				final Location loc = new Location(file, marker.getLine(), marker.getOffset(), marker.getEndOffset());
				loc.reportExternalProblem(marker.getMessage(), marker.getSeverity(), GeneralConstants.ONTHEFLY_SYNTACTIC_MARKER);
			}
		}
	}
	if (unsupportedConstructs != null) {
		for (final TITANMarker marker : unsupportedConstructs) {
			if (file.isAccessible()) {
				final Location loc = new Location(file, marker.getLine(), marker.getOffset(), marker.getEndOffset());
				loc.reportExternalProblem(marker.getMessage(), marker.getSeverity(), GeneralConstants.ONTHEFLY_SYNTACTIC_MARKER);
			}
		}
	}
	return returnValue;
}
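For experimentation outside the plug-in, the sketch below reproduces only the core ANTLR 4 wiring used above: lexer, CommonTokenStream, parser, and one error listener shared by both recognizers. It is a minimal sketch under assumptions: the generated Ttcn3Lexer and Ttcn3Reparser classes and the ANTLR 4 runtime are assumed to be on the classpath, and the CollectingErrorListener class, the class and variable names, and the placeholder input string are illustrative inventions, not part of the project; only pr_ErroneousAttributeSpec() and its errAttrSpec field come from the method shown above.

import java.util.ArrayList;
import java.util.List;

import org.antlr.v4.runtime.BaseErrorListener;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.RecognitionException;
import org.antlr.v4.runtime.Recognizer;

// Assumed package for the generated lexer; the Ttcn3Reparser package is taken from the import above.
import org.eclipse.titan.designer.parsers.ttcn3parser.Ttcn3Lexer;
import org.eclipse.titan.designer.parsers.ttcn3parser.Ttcn3Reparser;
import org.eclipse.titan.designer.parsers.ttcn3parser.Ttcn3Reparser.Pr_ErroneousAttributeSpecContext;

/**
 * Hypothetical stand-in for the plug-in's TitanListener: collects syntax errors
 * reported by both the lexer and the parser instead of printing them to the console.
 */
class CollectingErrorListener extends BaseErrorListener {
	final List<String> errors = new ArrayList<>();

	@Override
	public void syntaxError(final Recognizer<?, ?> recognizer, final Object offendingSymbol,
			final int line, final int charPositionInLine, final String msg, final RecognitionException e) {
		errors.add(line + ":" + charPositionInLine + " " + msg);
	}
}

public final class ErroneousAttributeSpecParseSketch {
	public static void main(final String[] args) {
		// Placeholder input: in the plug-in this text comes from
		// AttributeSpecification.getSpecification() and is unescaped by
		// Ttcn3CharstringLexer.parseCharstringValue before lexing.
		final String code = "<erroneous attribute specification text>";

		final CharStream charStream = CharStreams.fromString(code);
		final Ttcn3Lexer lexer = new Ttcn3Lexer(charStream);

		// One listener serves both recognizers, mirroring the TitanListener usage above.
		final CollectingErrorListener listener = new CollectingErrorListener();
		lexer.removeErrorListeners(); // drop the default ConsoleErrorListener
		lexer.addErrorListener(listener);

		// CommonTokenStream discards tokens routed to channel(HIDDEN); BufferedTokenStream would not.
		final CommonTokenStream tokenStream = new CommonTokenStream(lexer);

		final Ttcn3Reparser parser = new Ttcn3Reparser(tokenStream);
		parser.removeErrorListeners();
		parser.addErrorListener(listener);

		// Entry rule used by parseErrAttrSpecString; its context carries the parsed result.
		final Pr_ErroneousAttributeSpecContext root = parser.pr_ErroneousAttributeSpec();

		System.out.println("syntax errors: " + listener.errors);
		System.out.println("parsed spec:   " + root.errAttrSpec);
	}
}

The Eclipse-specific parts of the original method are deliberately left out of this sketch: the UnbufferedCharStream and CommonTokenFactory(true) setup, the file/offset/line bookkeeping on the reparser, and the marker handling (MarkerHandler, ParserMarkerSupport, reportExternalProblem) that turns the collected errors, warnings and unsupported constructs into editor markers.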