Use of antlr.RecognitionException in project groovy by apache.
The class GenericsUtils, method parseClassNodesFromString.
public static ClassNode[] parseClassNodesFromString(final String option, final SourceUnit sourceUnit, final CompilationUnit compilationUnit, final MethodNode mn, final ASTNode usage) {
    // wrap the option string in a dummy parameterized type so the Groovy grammar
    // can parse it as a single classOrInterfaceType
    GroovyLexer lexer = new GroovyLexer(new StringReader("DummyNode<" + option + ">"));
    final GroovyRecognizer rn = GroovyRecognizer.make(lexer);
    try {
        rn.classOrInterfaceType(true);
        final AtomicReference<ClassNode> ref = new AtomicReference<ClassNode>();
        AntlrParserPlugin plugin = new AntlrParserPlugin() {
            @Override
            public ModuleNode buildAST(final SourceUnit sourceUnit, final ClassLoader classLoader, final Reduction cst) throws ParserException {
                ref.set(makeTypeWithArguments(rn.getAST()));
                return null;
            }
        };
        plugin.buildAST(null, null, null);
        ClassNode parsedNode = ref.get();
        // the returned node is DummyNode<Param1, Param2, Param3, ...>
        GenericsType[] parsedNodeGenericsTypes = parsedNode.getGenericsTypes();
        if (parsedNodeGenericsTypes == null) {
            return null;
        }
        ClassNode[] signature = new ClassNode[parsedNodeGenericsTypes.length];
        for (int i = 0; i < parsedNodeGenericsTypes.length; i++) {
            final GenericsType genericsType = parsedNodeGenericsTypes[i];
            signature[i] = resolveClassNode(sourceUnit, compilationUnit, mn, usage, genericsType.getType());
        }
        return signature;
    } catch (RecognitionException e) {
        sourceUnit.addError(new IncorrectTypeHintException(mn, e, usage.getLineNumber(), usage.getColumnNumber()));
    } catch (TokenStreamException e) {
        sourceUnit.addError(new IncorrectTypeHintException(mn, e, usage.getLineNumber(), usage.getColumnNumber()));
    } catch (ParserException e) {
        sourceUnit.addError(new IncorrectTypeHintException(mn, e, usage.getLineNumber(), usage.getColumnNumber()));
    }
    return null;
}
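The core trick above is wrapping the type-hint string in a synthetic DummyNode<...> so the legacy ANTLR 2 Groovy grammar can parse it as one parameterized type. Below is a minimal standalone sketch of just that parsing step, assuming the legacy org.codehaus.groovy.antlr.parser classes (Groovy 2.x) are on the classpath; the class name TypeHintParseSketch and the example type string are illustrative, not part of the Groovy API.

import java.io.StringReader;

import antlr.RecognitionException;
import antlr.TokenStreamException;
import antlr.collections.AST;

import org.codehaus.groovy.antlr.parser.GroovyLexer;
import org.codehaus.groovy.antlr.parser.GroovyRecognizer;

public class TypeHintParseSketch {
    public static void main(String[] args) throws RecognitionException, TokenStreamException {
        // Same wrapping trick as parseClassNodesFromString: treat the hint as the
        // type-argument list of a dummy parameterized type.
        String option = "java.util.Map<String, Integer>, Number";
        GroovyLexer lexer = new GroovyLexer(new StringReader("DummyNode<" + option + ">"));
        GroovyRecognizer parser = GroovyRecognizer.make(lexer);

        // Parse a single class-or-interface type; a RecognitionException or
        // TokenStreamException thrown here is what the method above reports
        // as an IncorrectTypeHintException.
        parser.classOrInterfaceType(true);
        AST ast = parser.getAST();

        // Print the raw ANTLR 2 AST; in the real method, makeTypeWithArguments
        // converts this tree into a ClassNode.
        System.out.println(ast.toStringTree());
    }
}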
Use of antlr.RecognitionException in project sonarqube by SonarSource.
The class ValidWhenLexer, method nextToken.
public Token nextToken() throws TokenStreamException {
    Token theRetToken = null;
    tryAgain: for (;;) {
        Token _token = null;
        int _ttype = Token.INVALID_TYPE;
        resetText();
        try {
            // for char stream error handling
            try {
                // for lexical error handling
                switch (LA(1)) {
                    case '\t': case '\n': case '\r': case ' ': {
                        mWS(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case '-':
                    case '1': case '2': case '3': case '4': case '5':
                    case '6': case '7': case '8': case '9': {
                        mDECIMAL_LITERAL(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case '"': case '\'': {
                        mSTRING_LITERAL(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case '[': {
                        mLBRACKET(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case ']': {
                        mRBRACKET(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case '(': {
                        mLPAREN(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case ')': {
                        mRPAREN(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case '*': {
                        mTHIS(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case '.': case '_':
                    case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g':
                    case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n':
                    case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u':
                    case 'v': case 'w': case 'x': case 'y': case 'z': {
                        mIDENTIFIER(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case '=': {
                        mEQUALSIGN(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case '!': {
                        mNOTEQUALSIGN(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    default:
                        if ((LA(1) == '0') && (LA(2) == 'x')) {
                            mHEX_LITERAL(true);
                            theRetToken = _returnToken;
                        } else if ((LA(1) == '<') && (LA(2) == '=')) {
                            mLESSEQUALSIGN(true);
                            theRetToken = _returnToken;
                        } else if ((LA(1) == '>') && (LA(2) == '=')) {
                            mGREATEREQUALSIGN(true);
                            theRetToken = _returnToken;
                        } else if ((LA(1) == '0') && (true)) {
                            mOCTAL_LITERAL(true);
                            theRetToken = _returnToken;
                        } else if ((LA(1) == '<') && (true)) {
                            mLESSTHANSIGN(true);
                            theRetToken = _returnToken;
                        } else if ((LA(1) == '>') && (true)) {
                            mGREATERTHANSIGN(true);
                            theRetToken = _returnToken;
                        } else {
                            if (LA(1) == EOF_CHAR) {
                                uponEOF();
                                _returnToken = makeToken(Token.EOF_TYPE);
                            } else {
                                throw new NoViableAltForCharException((char) LA(1), getFilename(), getLine(), getColumn());
                            }
                        }
                }
                // found SKIP token
                if (_returnToken == null)
                    continue tryAgain;
                _ttype = _returnToken.getType();
                _ttype = testLiteralsTable(_ttype);
                _returnToken.setType(_ttype);
                return _returnToken;
            } catch (RecognitionException e) {
                throw new TokenStreamRecognitionException(e);
            }
        } catch (CharStreamException cse) {
            if (cse instanceof CharStreamIOException) {
                throw new TokenStreamIOException(((CharStreamIOException) cse).io);
            } else {
                throw new TokenStreamException(cse.getMessage());
            }
        }
    }
}
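A hedged sketch of how a caller might drive this lexer, assuming the standard constructors that ANTLR 2 generates (including one taking a java.io.Reader); the input expression and the class name ValidWhenLexerSketch are illustrative only.

import java.io.StringReader;

import antlr.Token;
import antlr.TokenStreamException;
import antlr.TokenStreamRecognitionException;

public class ValidWhenLexerSketch {
    public static void main(String[] args) {
        // Illustrative validwhen-style expression.
        ValidWhenLexer lexer = new ValidWhenLexer(new StringReader("((*this* >= 10) and (other != null))"));
        try {
            // nextToken() loops internally whenever a rule produces a SKIP token
            // and returns a token of type EOF_TYPE once the input is exhausted.
            for (Token t = lexer.nextToken(); t.getType() != Token.EOF_TYPE; t = lexer.nextToken()) {
                System.out.println(t.getType() + " -> " + t.getText());
            }
        } catch (TokenStreamRecognitionException e) {
            // Wraps the RecognitionException thrown for lexical errors above.
            System.err.println("Lexical error: " + e.recog);
        } catch (TokenStreamException e) {
            System.err.println("Token stream error: " + e.getMessage());
        }
    }
}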
Use of antlr.RecognitionException in project cxf by apache.
The class IDLParser, method finder_dcl.
public final void finder_dcl() throws RecognitionException, TokenStreamException {
    returnAST = null;
    ASTPair currentAST = new ASTPair();
    AST finder_dcl_AST = null;
    try {
        // for error handling
        AST tmp214_AST = null;
        tmp214_AST = astFactory.create(LT(1));
        astFactory.makeASTRoot(currentAST, tmp214_AST);
        match(LITERAL_finder);
        identifier();
        astFactory.addASTChild(currentAST, returnAST);
        match(LPAREN);
        init_param_decls();
        astFactory.addASTChild(currentAST, returnAST);
        match(RPAREN);
        {
            switch (LA(1)) {
                case LITERAL_raises: {
                    raises_expr();
                    astFactory.addASTChild(currentAST, returnAST);
                    break;
                }
                case SEMI: {
                    break;
                }
                default: {
                    throw new NoViableAltException(LT(1), getFilename());
                }
            }
        }
        finder_dcl_AST = (AST) currentAST.root;
    } catch (RecognitionException ex) {
        if (inputState.guessing == 0) {
            reportError(ex);
            consume();
            consumeUntil(_tokenSet_10);
        } else {
            throw ex;
        }
    }
    returnAST = finder_dcl_AST;
}
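Callers never invoke finder_dcl directly; it is reached from the grammar's start rule while the parser walks an IDL source. A minimal sketch of driving the generated parser follows; the companion IDLLexer class, the specification() start rule, the class name IdlParseSketch, and the IDL snippet are assumptions for illustration, not confirmed by this excerpt.

import java.io.StringReader;

import antlr.collections.AST;

public class IdlParseSketch {
    public static void main(String[] args) throws Exception {
        // Illustrative IDL input; real input would come from a .idl file.
        String idl = "interface Account {\n"
                   + "    attribute long balance;\n"
                   + "};\n";

        IDLLexer lexer = new IDLLexer(new StringReader(idl)); // assumed generated lexer
        IDLParser parser = new IDLParser(lexer);

        // Assumed start rule; rule methods such as finder_dcl() are invoked
        // internally and recover via reportError/consumeUntil as shown above.
        parser.specification();

        AST root = parser.getAST();
        System.out.println(root == null ? "no AST" : root.toStringTree());
    }
}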
Use of antlr.RecognitionException in project cxf by apache.
The class IDLParser, method union_type.
public final void union_type() throws RecognitionException, TokenStreamException {
    returnAST = null;
    ASTPair currentAST = new ASTPair();
    AST union_type_AST = null;
    try {
        // for error handling
        AST tmp120_AST = null;
        tmp120_AST = astFactory.create(LT(1));
        astFactory.makeASTRoot(currentAST, tmp120_AST);
        match(LITERAL_union);
        identifier();
        astFactory.addASTChild(currentAST, returnAST);
        match(LITERAL_switch);
        match(LPAREN);
        switch_type_spec();
        astFactory.addASTChild(currentAST, returnAST);
        match(RPAREN);
        match(LCURLY);
        switch_body();
        astFactory.addASTChild(currentAST, returnAST);
        match(RCURLY);
        union_type_AST = (AST) currentAST.root;
    } catch (RecognitionException ex) {
        if (inputState.guessing == 0) {
            reportError(ex);
            consume();
            consumeUntil(_tokenSet_23);
        } else {
            throw ex;
        }
    }
    returnAST = union_type_AST;
}
Use of antlr.RecognitionException in project cxf by apache.
The class IDLParser, method attr_declarator.
public final void attr_declarator() throws RecognitionException, TokenStreamException {
    returnAST = null;
    ASTPair currentAST = new ASTPair();
    AST attr_declarator_AST = null;
    try {
        // for error handling
        simple_declarator();
        astFactory.addASTChild(currentAST, returnAST);
        {
            boolean synPredMatched219 = false;
            if (((LA(1) == SEMI || LA(1) == LITERAL_getraises || LA(1) == LITERAL_setraises) && (_tokenSet_58.member(LA(2))) && (_tokenSet_59.member(LA(3))) && (_tokenSet_60.member(LA(4))))) {
                int _m219 = mark();
                synPredMatched219 = true;
                inputState.guessing++;
                try {
                    {
                        switch (LA(1)) {
                            case LITERAL_getraises: {
                                match(LITERAL_getraises);
                                break;
                            }
                            case LITERAL_setraises: {
                                match(LITERAL_setraises);
                                break;
                            }
                            default: {
                                throw new NoViableAltException(LT(1), getFilename());
                            }
                        }
                    }
                } catch (RecognitionException pe) {
                    synPredMatched219 = false;
                }
                rewind(_m219);
                inputState.guessing--;
            }
            if (synPredMatched219) {
                attr_raises_expr();
                astFactory.addASTChild(currentAST, returnAST);
            } else if ((LA(1) == SEMI || LA(1) == COMMA) && (_tokenSet_61.member(LA(2))) && (_tokenSet_62.member(LA(3))) && (_tokenSet_63.member(LA(4)))) {
                {
                    _loop221: do {
                        if ((LA(1) == COMMA)) {
                            match(COMMA);
                            simple_declarator();
                            astFactory.addASTChild(currentAST, returnAST);
                        } else {
                            break _loop221;
                        }
                    } while (true);
                }
            } else {
                throw new NoViableAltException(LT(1), getFilename());
            }
        }
        attr_declarator_AST = (AST) currentAST.root;
    } catch (RecognitionException ex) {
        if (inputState.guessing == 0) {
            reportError(ex);
            consume();
            consumeUntil(_tokenSet_10);
        } else {
            throw ex;
        }
    }
    returnAST = attr_declarator_AST;
}
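All three rule methods above report errors through reportError(ex) when inputState.guessing == 0 (normal parsing) and rethrow while a syntactic predicate is being evaluated, as attr_declarator shows. The sketch below collects those reported RecognitionExceptions instead of letting the default implementation print them; the subclassing approach, the start rule, and the IDL snippet are assumptions for illustration, not CXF's own API.

import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;

import antlr.RecognitionException;

public class CollectingIdlParse {
    public static void main(String[] args) throws Exception {
        // Illustrative attribute declaration with a comma-separated declarator list,
        // the shape attr_declarator handles.
        String idl = "interface Printer {\n"
                   + "    attribute string name, location;\n"
                   + "};\n";

        final List<RecognitionException> errors = new ArrayList<RecognitionException>();

        // ANTLR 2 parsers expose a public reportError(RecognitionException);
        // overriding it captures exactly what the catch blocks above report.
        IDLParser parser = new IDLParser(new IDLLexer(new StringReader(idl))) {
            @Override
            public void reportError(RecognitionException ex) {
                errors.add(ex);
            }
        };

        parser.specification(); // assumed start rule, as in the earlier sketch

        for (RecognitionException ex : errors) {
            System.err.println(ex.getLine() + ":" + ex.getColumn() + " " + ex.getMessage());
        }
    }
}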