Use of antlr.TokenStreamRecognitionException in the Apache groovy project:
the transformCSTIntoAST method of the AntlrParserPlugin class.
/**
 * Runs the ANTLR lexer/parser over the given source and stores the resulting
 * concrete syntax tree in {@code ast} (null until parsing succeeds).
 * <p>
 * Syntax problems are recorded on the {@code sourceUnit} as fatal
 * {@link SyntaxException}s rather than thrown; only stream-level failures
 * ({@link TokenStreamException}) are attached as generic exceptions.
 *
 * @param sourceUnit   the unit being compiled; receives any errors found
 * @param reader       character source for the Groovy text
 * @param sourceBuffer buffer that accumulates the raw source for later lookup
 * @throws CompilationFailedException if the controller aborts compilation
 */
protected void transformCSTIntoAST(SourceUnit sourceUnit, Reader reader, SourceBuffer sourceBuffer) throws CompilationFailedException {
    ast = null;
    setController(sourceUnit);
    // TODO find a way to inject any GroovyLexer/GroovyRecognizer
    UnicodeEscapingReader unicodeReader = new UnicodeEscapingReader(reader, sourceBuffer);
    UnicodeLexerSharedInputState inputState = new UnicodeLexerSharedInputState(unicodeReader);
    GroovyLexer lexer = new GroovyLexer(inputState);
    unicodeReader.setLexer(lexer);
    GroovyRecognizer parser = GroovyRecognizer.make(lexer);
    parser.setSourceBuffer(sourceBuffer);
    tokenNames = parser.getTokenNames();
    parser.setFilename(sourceUnit.getName());
    // start parsing at the compilationUnit rule
    try {
        parser.compilationUnit();
    } catch (TokenStreamRecognitionException tsre) {
        // Lexer errors arrive wrapped; unwrap to report the real source position.
        reportFatalSyntaxError(sourceUnit, tsre.recog);
    } catch (RecognitionException e) {
        reportFatalSyntaxError(sourceUnit, e);
    } catch (TokenStreamException e) {
        sourceUnit.addException(e);
    }
    ast = parser.getAST();
}

/**
 * Records {@code e} on the source unit as a fatal {@link SyntaxException},
 * preserving the original line/column position. Shared by both the wrapped
 * (TokenStreamRecognitionException) and direct RecognitionException paths.
 */
private void reportFatalSyntaxError(SourceUnit sourceUnit, RecognitionException e) {
    SyntaxException se = new SyntaxException(e.getMessage(), e, e.getLine(), e.getColumn());
    se.setFatal(true);
    sourceUnit.addError(se);
}
Use of antlr.TokenStreamRecognitionException in the SonarSource sonarqube project:
the nextToken method of the ValidWhenLexer class.
/**
 * Returns the next token from the input stream (ANTLR-generated lexer entry point).
 * Loops until a non-SKIP token or EOF is produced; recognition errors are
 * rewrapped as TokenStreamRecognitionException and stream errors as
 * TokenStreamIOException/TokenStreamException.
 *
 * NOTE(review): this is machine-generated ANTLR 2 code; structure and the
 * unused {@code _token} local are artifacts of the code generator.
 */
public Token nextToken() throws TokenStreamException {
Token theRetToken = null;
// Retry loop: continues when a rule matched a SKIP token (e.g. whitespace).
tryAgain: for (; ; ) {
Token _token = null;
int _ttype = Token.INVALID_TYPE;
resetText();
try {
// for char stream error handling
try {
// for lexical error handling
// Dispatch on a single character of lookahead; each case delegates to
// the generated rule method, which sets _returnToken.
switch(LA(1)) {
case '\t':
case '\n':
case '\r':
case ' ':
{
mWS(true);
theRetToken = _returnToken;
break;
}
// Note: '0' is intentionally absent here — it is disambiguated in the
// default branch between HEX_LITERAL ("0x...") and OCTAL_LITERAL.
case '-':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
{
mDECIMAL_LITERAL(true);
theRetToken = _returnToken;
break;
}
case '"':
case '\'':
{
mSTRING_LITERAL(true);
theRetToken = _returnToken;
break;
}
case '[':
{
mLBRACKET(true);
theRetToken = _returnToken;
break;
}
case ']':
{
mRBRACKET(true);
theRetToken = _returnToken;
break;
}
case '(':
{
mLPAREN(true);
theRetToken = _returnToken;
break;
}
case ')':
{
mRPAREN(true);
theRetToken = _returnToken;
break;
}
case '*':
{
mTHIS(true);
theRetToken = _returnToken;
break;
}
case '.':
case '_':
case 'a':
case 'b':
case 'c':
case 'd':
case 'e':
case 'f':
case 'g':
case 'h':
case 'i':
case 'j':
case 'k':
case 'l':
case 'm':
case 'n':
case 'o':
case 'p':
case 'q':
case 'r':
case 's':
case 't':
case 'u':
case 'v':
case 'w':
case 'x':
case 'y':
case 'z':
{
mIDENTIFIER(true);
theRetToken = _returnToken;
break;
}
case '=':
{
mEQUALSIGN(true);
theRetToken = _returnToken;
break;
}
case '!':
{
mNOTEQUALSIGN(true);
theRetToken = _returnToken;
break;
}
// Tokens that need two characters of lookahead ('0x', '<=', '>=') or a
// longest-match fallback ('0', '<', '>') are resolved here in order.
default:
if ((LA(1) == '0') && (LA(2) == 'x')) {
mHEX_LITERAL(true);
theRetToken = _returnToken;
} else if ((LA(1) == '<') && (LA(2) == '=')) {
mLESSEQUALSIGN(true);
theRetToken = _returnToken;
} else if ((LA(1) == '>') && (LA(2) == '=')) {
mGREATEREQUALSIGN(true);
theRetToken = _returnToken;
} else if ((LA(1) == '0') && (true)) {
mOCTAL_LITERAL(true);
theRetToken = _returnToken;
} else if ((LA(1) == '<') && (true)) {
mLESSTHANSIGN(true);
theRetToken = _returnToken;
} else if ((LA(1) == '>') && (true)) {
mGREATERTHANSIGN(true);
theRetToken = _returnToken;
} else {
if (LA(1) == EOF_CHAR) {
uponEOF();
_returnToken = makeToken(Token.EOF_TYPE);
} else {
// No rule can start with this character.
throw new NoViableAltForCharException((char) LA(1), getFilename(), getLine(), getColumn());
}
}
}
// found SKIP token
if (_returnToken == null)
continue tryAgain;
// Allow the literals table to remap identifier-like tokens to keywords.
_ttype = _returnToken.getType();
_ttype = testLiteralsTable(_ttype);
_returnToken.setType(_ttype);
return _returnToken;
} catch (RecognitionException e) {
// Lexical error: surface to the parser as a token-stream-level failure,
// preserving the original exception as the cause.
throw new TokenStreamRecognitionException(e);
}
} catch (CharStreamException cse) {
// I/O-backed failures keep their IOException; others degrade to a message.
if (cse instanceof CharStreamIOException) {
throw new TokenStreamIOException(((CharStreamIOException) cse).io);
} else {
throw new TokenStreamException(cse.getMessage());
}
}
}
}
Use of antlr.TokenStreamRecognitionException in the groovy-core project (by groovy):
the transformCSTIntoAST method of the AntlrParserPlugin class.
/**
 * Lexes and parses the supplied Groovy source, leaving the resulting concrete
 * syntax tree in {@code ast}. Recognition problems are not propagated: they are
 * recorded on {@code sourceUnit} as fatal {@link SyntaxException}s (or, for
 * token-stream failures, as plain exceptions).
 *
 * @param sourceUnit   compilation unit that collects any reported errors
 * @param reader       source of the Groovy characters to parse
 * @param sourceBuffer buffer capturing the raw text as it is read
 * @throws CompilationFailedException if the compilation controller aborts
 */
protected void transformCSTIntoAST(SourceUnit sourceUnit, Reader reader, SourceBuffer sourceBuffer) throws CompilationFailedException {
    ast = null;
    setController(sourceUnit);

    // TODO find a way to inject any GroovyLexer/GroovyRecognizer
    // Wire reader -> unicode-escaping reader -> lexer -> recognizer.
    UnicodeEscapingReader escapingReader = new UnicodeEscapingReader(reader, sourceBuffer);
    GroovyLexer lexer = new GroovyLexer(new UnicodeLexerSharedInputState(escapingReader));
    escapingReader.setLexer(lexer);

    GroovyRecognizer recognizer = GroovyRecognizer.make(lexer);
    recognizer.setSourceBuffer(sourceBuffer);
    tokenNames = recognizer.getTokenNames();
    recognizer.setFilename(sourceUnit.getName());

    // start parsing at the compilationUnit rule
    try {
        recognizer.compilationUnit();
    } catch (TokenStreamRecognitionException tsre) {
        // Lexer errors come wrapped; unwrap so the reported position is right.
        RecognitionException cause = tsre.recog;
        SyntaxException syntaxError = new SyntaxException(cause.getMessage(), cause, cause.getLine(), cause.getColumn());
        syntaxError.setFatal(true);
        sourceUnit.addError(syntaxError);
    } catch (RecognitionException parseError) {
        SyntaxException syntaxError = new SyntaxException(parseError.getMessage(), parseError, parseError.getLine(), parseError.getColumn());
        syntaxError.setFatal(true);
        sourceUnit.addError(syntaxError);
    } catch (TokenStreamException streamError) {
        sourceUnit.addException(streamError);
    }

    ast = recognizer.getAST();
}
Use of antlr.TokenStreamRecognitionException in the checkstyle project:
the processFiltered method of the TreeWalker class.
/**
 * Parses and walks a single file: builds the AST from {@code lines}, walks it
 * once without comments and once with hidden comment nodes appended. Any
 * ANTLR recognition or token-stream failure is converted into a
 * {@link CheckstyleException} naming the exception type and the file.
 *
 * @param file  the file under analysis
 * @param lines its contents, one entry per line
 * @throws CheckstyleException if parsing the file fails
 */
@Override
protected void processFiltered(File file, List<String> lines) throws CheckstyleException {
    // Guard clause: ignore files whose extension is not handled by this walker.
    if (!CommonUtils.matchesFileExtension(file, getFileExtensions())) {
        return;
    }
    final String msg = "%s occurred during the analysis of file %s.";
    final String fileName = file.getPath();
    try {
        final FileText text = FileText.fromLines(file, lines);
        final FileContents contents = new FileContents(text);
        final DetailAST rootAST = parse(contents);
        getMessageCollector().reset();
        // First pass: ordinary tokens only; second pass includes comment nodes.
        walk(rootAST, contents, AstState.ORDINARY);
        walk(appendHiddenCommentNodes(rootAST), contents, AstState.WITH_COMMENTS);
    } catch (final TokenStreamRecognitionException tre) {
        throw new CheckstyleException(
                String.format(Locale.ROOT, msg, "TokenStreamRecognitionException", fileName), tre);
    } catch (RecognitionException | TokenStreamException ex) {
        throw new CheckstyleException(
                String.format(Locale.ROOT, msg, ex.getClass().getSimpleName(), fileName), ex);
    }
}
Aggregations