Example usage of org.antlr.v4.runtime.Token from the antlr4 project (by antlr): class AttributeChecks, method setAttr.
@Override
public void setAttr(String expr, Token x, Token rhs) {
    // Attribute assignment has no meaning inside a lexer action; report and bail out.
    if (g.isLexer()) {
        errMgr.grammarError(ErrorType.ATTRIBUTE_IN_LEXER_ACTION, g.fileName, x, x.getText(), expr);
        return;
    }
    String attrName = x.getText();
    if (node.resolver.resolveToAttribute(attrName, node) == null) {
        // $ids for ids+=ID etc... gets a more specific diagnostic than a
        // plain "unknown attribute" error.
        ErrorType errorType = node.resolver.resolvesToListLabel(attrName, node)
            ? ErrorType.ASSIGNMENT_TO_LIST_LABEL
            : ErrorType.UNKNOWN_SIMPLE_ATTRIBUTE;
        errMgr.grammarError(errorType, g.fileName, x, attrName, expr);
    }
    // Regardless of the left-hand side's validity, still validate the
    // right-hand-side expression for attribute references.
    new AttributeChecks(g, r, alt, node, rhs).examineAction();
}
Example usage of org.antlr.v4.runtime.Token from the antlr4 project (by antlr): class BasicSemanticChecks, method checkImport.
void checkImport(Token importID) {
    // Validates a single `import X;` statement: the delegate grammar type must
    // be importable into this grammar type, and its name must not collide with
    // the lexer/parser grammars implicitly generated from a combined grammar.
    Grammar delegate = g.getImportedGrammar(importID.getText());
    if (delegate == null) {
        return; // unresolved import; reported elsewhere
    }
    List<Integer> validDelegators = validImportTypes.get(delegate.getType());
    boolean delegatorAllowed = validDelegators == null || validDelegators.contains(g.getType());
    if (!delegatorAllowed) {
        g.tool.errMgr.grammarError(ErrorType.INVALID_IMPORT, g.fileName, importID, g, delegate);
    }
    if (g.isCombined()) {
        // A combined grammar G implicitly produces GLexer and GParser; importing
        // a grammar with either of those names would clash with them.
        String implicitLexerName = g.name + Grammar.getGrammarTypeToFileNameSuffix(ANTLRParser.LEXER);
        String implicitParserName = g.name + Grammar.getGrammarTypeToFileNameSuffix(ANTLRParser.PARSER);
        if (delegate.name.equals(implicitLexerName) || delegate.name.equals(implicitParserName)) {
            g.tool.errMgr.grammarError(ErrorType.IMPORT_NAME_CLASH, g.fileName, importID, g, delegate);
        }
    }
}
Example usage of org.antlr.v4.runtime.Token from the antlr4 project (by antlr): class SemanticPipeline, method process.
/**
 * Runs the full semantic-analysis pipeline over the grammar's AST: rule
 * collection, basic checks, left-recursion elimination, symbol
 * collection/collision checks, token and channel type assignment, and
 * attribute-expression validation. The stages are strictly ordered — each
 * one mutates the {@code Grammar} and later stages depend on those side
 * effects — and the pipeline aborts after any stage that produced new
 * errors, since continuing would generate misleading follow-on errors.
 */
public void process() {
if (g.ast == null)
return;
// COLLECT RULE OBJECTS
RuleCollector ruleCollector = new RuleCollector(g);
ruleCollector.process(g.ast);
// DO BASIC / EASY SEMANTIC CHECKS
// Snapshot the error count so we can detect errors introduced by this stage
// alone, as opposed to errors already present before the pipeline started.
int prevErrors = g.tool.errMgr.getNumErrors();
BasicSemanticChecks basics = new BasicSemanticChecks(g, ruleCollector);
basics.process();
if (g.tool.errMgr.getNumErrors() > prevErrors)
return;
// TRANSFORM LEFT-RECURSIVE RULES
prevErrors = g.tool.errMgr.getNumErrors();
LeftRecursiveRuleTransformer lrtrans = new LeftRecursiveRuleTransformer(g.ast, ruleCollector.rules.values(), g);
lrtrans.translateLeftRecursiveRules();
// don't continue if we got errors during left-recursion elimination
if (g.tool.errMgr.getNumErrors() > prevErrors)
return;
// STORE RULES IN GRAMMAR
// Must happen after left-recursion elimination so the transformed rule
// bodies are the ones registered with the grammar.
for (Rule r : ruleCollector.rules.values()) {
g.defineRule(r);
}
// COLLECT SYMBOLS: RULES, ACTIONS, TERMINALS, ...
SymbolCollector collector = new SymbolCollector(g);
collector.process(g.ast);
// CHECK FOR SYMBOL COLLISIONS
SymbolChecks symcheck = new SymbolChecks(g, collector);
// side-effect: strip away redef'd rules.
symcheck.process();
for (GrammarAST a : collector.namedActions) {
g.defineAction(a);
}
// LINK (outermost) ALT NODES WITH Alternatives
// Alternative numbering is 1-based, hence the loop from 1 to numberOfAlts.
for (Rule r : g.rules.values()) {
for (int i = 1; i <= r.numberOfAlts; i++) {
r.alt[i].ast.alt = r.alt[i];
}
}
// ASSIGN TOKEN TYPES
// Tokens imported from a .tokens file take effect before types are assigned
// to the tokens declared/referenced in this grammar.
g.importTokensFromTokensFile();
if (g.isLexer()) {
assignLexerTokenTypes(g, collector.tokensDefs);
} else {
assignTokenTypes(g, collector.tokensDefs, collector.tokenIDRefs, collector.terminals);
}
symcheck.checkForModeConflicts(g);
assignChannelTypes(g, collector.channelDefs);
// CHECK RULE REFS NOW (that we've defined rules in grammar)
symcheck.checkRuleArgs(g, collector.rulerefs);
identifyStartRules(collector);
symcheck.checkForQualifiedRuleIssues(g, collector.qualifiedRulerefs);
// don't continue if we got symbol errors
if (g.tool.getNumErrors() > 0)
return;
// CHECK ATTRIBUTE EXPRESSIONS FOR SEMANTIC VALIDITY
AttributeChecks.checkAllAttributeExpressions(g);
UseDefAnalyzer.trackTokenRuleRefsInActions(g);
}
Example usage of org.antlr.v4.runtime.Token from the antlr4 project (by antlr): class SemanticPipeline, method assignTokenTypes.
void assignTokenTypes(Grammar g, List<GrammarAST> tokensDefs, List<GrammarAST> tokenIDs, List<GrammarAST> terminals) {
    // Assigns token types for a parser/combined grammar in three passes:
    // tokens{} aliases first, then implicit token-name references, then
    // string-literal references are verified to have a known type.

    // create token types for tokens { A, B, C } ALIASES
    for (GrammarAST aliasAST : tokensDefs) {
        String aliasName = aliasAST.getText();
        // Re-declaring a name that already has a type is an error, but we
        // still (re)define it so later references resolve.
        if (g.getTokenType(aliasName) != Token.INVALID_TYPE) {
            g.tool.errMgr.grammarError(ErrorType.TOKEN_NAME_REASSIGNMENT, g.fileName, aliasAST.token, aliasName);
        }
        g.defineTokenName(aliasName);
    }

    // DEFINE TOKEN TYPES FOR TOKEN REFS LIKE ID, INT
    for (GrammarAST idAST : tokenIDs) {
        String idName = idAST.getText();
        // A reference without a prior definition gets a warning, then an
        // implicit definition so analysis can proceed.
        if (g.getTokenType(idName) == Token.INVALID_TYPE) {
            g.tool.errMgr.grammarError(ErrorType.IMPLICIT_TOKEN_DEFINITION, g.fileName, idAST.token, idName);
        }
        g.defineTokenName(idName);
    }

    // VERIFY TOKEN TYPES FOR STRING LITERAL REFS LIKE 'while', ';'
    for (GrammarAST termAST : terminals) {
        if (termAST.getType() != ANTLRParser.STRING_LITERAL) {
            continue;
        }
        // Unlike token names, unknown literals are only reported, not defined.
        if (g.getTokenType(termAST.getText()) == Token.INVALID_TYPE) {
            g.tool.errMgr.grammarError(ErrorType.IMPLICIT_STRING_DEFINITION, g.fileName, termAST.token, termAST.getText());
        }
    }

    g.tool.log("semantics", "tokens=" + g.tokenNameToTypeMap);
    g.tool.log("semantics", "strings=" + g.stringLiteralToTypeMap);
}
Example usage of org.antlr.v4.runtime.Token from the antlr4 project (by antlr): class TestRig, method process.
protected void process(Lexer lexer, Class<? extends Parser> parserClass, Parser parser, CharStream input) throws IOException, IllegalAccessException, InvocationTargetException, PrintException {
    // Tokenizes the input, optionally dumps the token stream, then (unless we
    // were asked to stop at the lexer) invokes the named start rule via
    // reflection and renders the parse tree per the configured output flags.
    lexer.setInputStream(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    tokens.fill();

    if (showTokens) {
        for (Token tok : tokens.getTokens()) {
            // CommonToken can render itself with the lexer's vocabulary for
            // nicer output; fall back to plain toString otherwise.
            String rendered = (tok instanceof CommonToken)
                ? ((CommonToken) tok).toString(lexer)
                : tok.toString();
            System.out.println(rendered);
        }
    }

    if (startRuleName.equals(LEXER_START_RULE_NAME)) {
        return; // lexer-only run; nothing to parse
    }

    if (diagnostics) {
        parser.addErrorListener(new DiagnosticErrorListener());
        parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);
    }
    if (printTree || gui || psFile != null) {
        parser.setBuildParseTree(true);
    }
    if (SLL) {
        // overrides diagnostics
        parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
    }
    parser.setTokenStream(tokens);
    parser.setTrace(trace);

    try {
        Method startRule = parserClass.getMethod(startRuleName);
        ParserRuleContext tree = (ParserRuleContext) startRule.invoke(parser, (Object[]) null);
        if (printTree) {
            System.out.println(tree.toStringTree(parser));
        }
        if (gui) {
            Trees.inspect(tree, parser);
        }
        if (psFile != null) {
            // Generate postscript
            Trees.save(tree, parser, psFile);
        }
    } catch (NoSuchMethodException nsme) {
        // Reported (not rethrown): a bad rule name is a usage error, not a crash.
        System.err.println("No method for rule " + startRuleName + " or it has arguments");
    }
}
Aggregations