Use of org.antlr.v4.tool.Rule in the project antlr4 (tunnelvisionlabs fork):
class Tool, method generateATNs.
/**
 * Writes a DOT (Graphviz) rendering of the ATN start state of every rule
 * in {@code g}, and of every grammar it imports, via {@link #writeDOTFile}.
 * File-write failures are reported through the error manager rather than
 * propagated.
 *
 * @param g the root grammar whose ATN is rendered
 */
public void generateATNs(Grammar g) {
    DOTGenerator dotGen = new DOTGenerator(g);
    List<Grammar> allGrammars = new ArrayList<Grammar>();
    allGrammars.add(g);
    List<Grammar> importedGrammars = g.getAllImportedGrammars();
    if (importedGrammars != null) {
        allGrammars.addAll(importedGrammars);
    }
    for (Grammar grammar : allGrammars) {
        for (Rule rule : grammar.rules.values()) {
            try {
                // NOTE(review): the start state is always looked up in the root
                // grammar's ATN, even for rules of imported grammars — presumably
                // imported rules share the root ATN's rule indexing; confirm.
                String dot = dotGen.getDOT(g.atn.ruleToStartState[rule.index], g.isLexer());
                if (dot != null) {
                    writeDOTFile(g, rule, dot);
                }
            }
            catch (IOException ioe) {
                errMgr.toolError(ErrorType.CANNOT_WRITE_FILE, ioe);
            }
        }
    }
}
Use of org.antlr.v4.tool.Rule in the project antlr4 (tunnelvisionlabs fork):
class Tool, method createGrammar.
/**
 * Builds the {@link Grammar} object for a raw grammar AST and wires every
 * node of the tree back to that grammar, so later phases (error handling,
 * rule origin lookup) can navigate from any node to its grammar.
 *
 * @param ast the root of the parsed grammar tree
 * @return a {@link LexerGrammar} for lexer grammars, otherwise a plain
 *         {@link Grammar}
 */
public Grammar createGrammar(GrammarRootAST ast) {
    final Grammar g = ast.grammarType == ANTLRParser.LEXER
        ? new LexerGrammar(this, ast)
        : new Grammar(this, ast);
    // every node must carry a pointer to its surrounding grammar
    GrammarTransformPipeline.setGrammarPtr(g, ast);
    return g;
}
Use of org.antlr.v4.tool.Rule in the project antlr4 (tunnelvisionlabs fork):
class AnalysisPipeline, method processLexer.
/**
 * Verifies that every non-fragment lexer rule matches at least one input
 * symbol; a rule whose lookahead set contains {@link Token#EPSILON} can
 * match the empty string and is reported as an
 * {@link ErrorType#EPSILON_TOKEN} grammar error.
 */
protected void processLexer() {
    // The analyzer depends only on the grammar's ATN, not on the rule being
    // checked, so construct it once instead of once per rule.
    LL1Analyzer analyzer = new LL1Analyzer(g.atn);
    // make sure all non-fragment lexer rules must match at least one symbol
    for (Rule rule : g.rules.values()) {
        // fragment rules are only referenced from other rules; they need not
        // match a symbol on their own
        if (rule.isFragment()) {
            continue;
        }
        IntervalSet look = analyzer.LOOK(g.atn.ruleToStartState[rule.index], PredictionContext.EMPTY_LOCAL);
        if (look.contains(Token.EPSILON)) {
            g.tool.errMgr.grammarError(ErrorType.EPSILON_TOKEN, g.fileName, ((GrammarAST) rule.ast.getChild(0)).getToken(), rule.name);
        }
    }
}
Use of org.antlr.v4.tool.Rule in the project antlr4 (tunnelvisionlabs fork):
class LeftFactoringRuleTransformer, method createLeftFactoredRuleVariant.
/**
 * Creates left-factored variant rule(s) for {@code rule} with respect to
 * {@code factoredElement}, inserts the new rule AST(s) next to the original
 * rule in the grammar tree, and registers them via {@code _g.defineRule}.
 * Variant names are built from the original rule name plus
 * {@code RULE_LF_VARIANT_MARKER} / {@code RULE_NOLF_VARIANT_MARKER} and the
 * factored element.
 *
 * @param rule the rule to factor
 * @param factoredElement the element being factored out of the decision
 * @return {@code RuleVariants.NONE} when no alternative could be factored,
 *         {@code FULLY_FACTORED} when every alternative was factored, or
 *         {@code PARTIALLY_FACTORED} when both a factored and an unfactored
 *         variant were produced
 */
protected RuleVariants createLeftFactoredRuleVariant(Rule rule, String factoredElement) {
// work on copies of the rule AST so the original tree is left intact
RuleAST ast = (RuleAST) rule.ast.dupTree();
BlockAST block = (BlockAST) ast.getFirstChildWithType(ANTLRParser.BLOCK);
RuleAST unfactoredAst = null;
BlockAST unfactoredBlock = null;
// first try to factor every alternative in one pass
if (translateLeftFactoredDecision(block, factoredElement, true, DecisionFactorMode.FULL_FACTOR, false)) {
// all alternatives factored
} else {
// full factoring failed (it may have partially mutated the copy), so
// start over from a fresh copy and attempt partial factoring
ast = (RuleAST) rule.ast.dupTree();
block = (BlockAST) ast.getFirstChildWithType(ANTLRParser.BLOCK);
if (!translateLeftFactoredDecision(block, factoredElement, true, DecisionFactorMode.PARTIAL_FACTORED, false)) {
// no left factored alts
return RuleVariants.NONE;
}
// partial factoring succeeded; build the complementary variant holding
// the alternatives that could NOT be factored
unfactoredAst = (RuleAST) rule.ast.dupTree();
unfactoredBlock = (BlockAST) unfactoredAst.getFirstChildWithType(ANTLRParser.BLOCK);
if (!translateLeftFactoredDecision(unfactoredBlock, factoredElement, true, DecisionFactorMode.PARTIAL_UNFACTORED, false)) {
throw new IllegalStateException("expected unfactored alts for partial factorization");
}
}
/*
 * factored elements
 */
{
// rename the copied rule to the factored-variant name and splice it into
// the grammar tree immediately after the original rule
String variantName = ast.getChild(0).getText() + ATNSimulator.RULE_LF_VARIANT_MARKER + factoredElement;
((GrammarAST) ast.getChild(0)).token = adaptor.createToken(ast.getChild(0).getType(), variantName);
GrammarAST ruleParent = (GrammarAST) rule.ast.getParent();
ruleParent.insertChild(rule.ast.getChildIndex() + 1, ast);
// re-sync parent/childIndex bookkeeping after the insertion
ruleParent.freshenParentAndChildIndexes(rule.ast.getChildIndex());
List<GrammarAST> alts = block.getAllChildrenWithType(ANTLRParser.ALT);
Rule variant = new Rule(_g, ast.getChild(0).getText(), ast, alts.size());
_g.defineRule(variant);
// Rule.alt is 1-based, hence the i + 1 offset
for (int i = 0; i < alts.size(); i++) {
variant.alt[i + 1].ast = (AltAST) alts.get(i);
}
}
/*
 * unfactored elements
 */
if (unfactoredAst != null) {
// same splice-and-register dance for the unfactored variant
String variantName = unfactoredAst.getChild(0).getText() + ATNSimulator.RULE_NOLF_VARIANT_MARKER + factoredElement;
((GrammarAST) unfactoredAst.getChild(0)).token = adaptor.createToken(unfactoredAst.getChild(0).getType(), variantName);
GrammarAST ruleParent = (GrammarAST) rule.ast.getParent();
ruleParent.insertChild(rule.ast.getChildIndex() + 1, unfactoredAst);
ruleParent.freshenParentAndChildIndexes(rule.ast.getChildIndex());
List<GrammarAST> alts = unfactoredBlock.getAllChildrenWithType(ANTLRParser.ALT);
Rule variant = new Rule(_g, unfactoredAst.getChild(0).getText(), unfactoredAst, alts.size());
_g.defineRule(variant);
for (int i = 0; i < alts.size(); i++) {
variant.alt[i + 1].ast = (AltAST) alts.get(i);
}
}
/*
 * result
 */
return unfactoredAst == null ? RuleVariants.FULLY_FACTORED : RuleVariants.PARTIALLY_FACTORED;
}
Use of org.antlr.v4.tool.Rule in the project antlr4 (tunnelvisionlabs fork):
class LeftFactoringRuleTransformer, method translateLeftFactoredDecision.
/**
 * Rewrites the alternatives of the decision {@code block} so that
 * {@code factoredRule} is left-factored out of them, according to
 * {@code mode}. May mutate {@code block} even when it ultimately
 * returns {@code false} (callers re-copy the tree before retrying).
 *
 * @param block the BLOCK subtree holding the decision's alternatives
 * @param factoredRule the rule reference being factored out
 * @param variant {@code true} when operating on a variant copy; when
 *        {@code false} and the block belongs directly to a rule, the
 *        owning {@code Rule}'s alternative bookkeeping is rebuilt at the end
 * @param mode which alternatives to keep: fully factored, partially
 *        factored, unfactored, or a combined answer
 * @param includeFactoredElement whether the factored element itself stays
 *        in the rewritten alternatives
 * @return {@code true} if the block was rewritten in the requested mode
 */
protected boolean translateLeftFactoredDecision(GrammarAST block, String factoredRule, boolean variant, DecisionFactorMode mode, boolean includeFactoredElement) {
// reject argument combinations that contradict the requested mode
if (mode == DecisionFactorMode.PARTIAL_UNFACTORED && includeFactoredElement) {
throw new IllegalArgumentException("Cannot include the factored element in unfactored alternatives.");
} else if (mode == DecisionFactorMode.COMBINED_FACTOR && !includeFactoredElement) {
throw new IllegalArgumentException("Cannot return a combined answer without the factored element.");
}
// expand ?/* quantifiers first so each alternative is a plain sequence
if (!expandOptionalQuantifiersForBlock(block, variant)) {
return false;
}
List<GrammarAST> alternatives = block.getAllChildrenWithType(ANTLRParser.ALT);
// per-alternative translation results; null entry = not translatable that way
GrammarAST[] factoredAlternatives = new GrammarAST[alternatives.size()];
GrammarAST[] unfactoredAlternatives = new GrammarAST[alternatives.size()];
// index sets of which alternatives translated successfully each way
IntervalSet factoredIntervals = new IntervalSet();
IntervalSet unfactoredIntervals = new IntervalSet();
for (int i = 0; i < alternatives.size(); i++) {
GrammarAST alternative = alternatives.get(i);
if (mode.includeUnfactoredAlts()) {
// translate a COPY for the unfactored result so the original node is
// still available for the factored translation below
GrammarAST unfactoredAlt = translateLeftFactoredAlternative(alternative.dupTree(), factoredRule, variant, DecisionFactorMode.PARTIAL_UNFACTORED, false);
unfactoredAlternatives[i] = unfactoredAlt;
if (unfactoredAlt != null) {
unfactoredIntervals.add(i);
}
}
if (mode.includeFactoredAlts()) {
GrammarAST factoredAlt = translateLeftFactoredAlternative(alternative, factoredRule, variant, mode == DecisionFactorMode.COMBINED_FACTOR ? DecisionFactorMode.PARTIAL_FACTORED : DecisionFactorMode.FULL_FACTOR, includeFactoredElement);
factoredAlternatives[i] = factoredAlt;
if (factoredAlt != null) {
factoredIntervals.add(alternative.getChildIndex());
}
}
}
// nothing translated in the direction(s) the mode requires -> give up
if (factoredIntervals.isNil() && !mode.includeUnfactoredAlts()) {
return false;
} else if (unfactoredIntervals.isNil() && !mode.includeFactoredAlts()) {
return false;
}
// homogeneous case 1: every alternative factored -> replace alts in place
if (unfactoredIntervals.isNil() && factoredIntervals.size() == alternatives.size() && mode.includeFactoredAlts() && !includeFactoredElement) {
for (int i = 0; i < factoredAlternatives.length; i++) {
GrammarAST translatedAlt = factoredAlternatives[i];
// an alternative emptied by factoring must still have a child: EPSILON
if (translatedAlt.getChildCount() == 0) {
adaptor.addChild(translatedAlt, adaptor.create(ANTLRParser.EPSILON, "EPSILON"));
}
adaptor.setChild(block, i, translatedAlt);
}
return true;
// homogeneous case 2: every alternative unfactored -> replace alts in place
} else if (factoredIntervals.isNil() && unfactoredIntervals.size() == alternatives.size() && mode.includeUnfactoredAlts()) {
for (int i = 0; i < unfactoredAlternatives.length; i++) {
GrammarAST translatedAlt = unfactoredAlternatives[i];
if (translatedAlt.getChildCount() == 0) {
adaptor.addChild(translatedAlt, adaptor.create(ANTLRParser.EPSILON, "EPSILON"));
}
adaptor.setChild(block, i, translatedAlt);
}
return true;
}
// FULL_FACTOR demands that all alternatives factor; mixed results fail
if (mode == DecisionFactorMode.FULL_FACTOR) {
return false;
}
/* for a, b, c being arbitrary `element` trees, this block performs
 * this transformation:
 *
 * factoredElement a
 * | factoredElement b
 * | factoredElement c
 * | ...
 *
 * ==>
 *
 * factoredElement (a | b | c | ...)
 */
GrammarAST newChildren = adaptor.nil();
for (int i = 0; i < alternatives.size(); i++) {
if (mode.includeFactoredAlts() && factoredIntervals.contains(i)) {
// merge with the previous alt only if it was also factored and did not
// additionally produce an unfactored sibling that was kept
boolean combineWithPrevious = i > 0 && factoredIntervals.contains(i - 1) && (!mode.includeUnfactoredAlts() || !unfactoredIntervals.contains(i - 1));
if (combineWithPrevious) {
GrammarAST translatedAlt = factoredAlternatives[i];
if (translatedAlt.getChildCount() == 0) {
adaptor.addChild(translatedAlt, adaptor.create(ANTLRParser.EPSILON, "EPSILON"));
}
GrammarAST previous = (GrammarAST) newChildren.getChild(newChildren.getChildCount() - 1);
if (LOGGER.isLoggable(Level.FINE)) {
LOGGER.log(Level.FINE, previous.toStringTree());
LOGGER.log(Level.FINE, translatedAlt.toStringTree());
}
// normalize `previous` to the shape: factoredElement BLOCK(ALT ...)
// by pushing its trailing elements into a fresh nested BLOCK/ALT
if (previous.getChildCount() == 1 || previous.getChild(1).getType() != ANTLRParser.BLOCK) {
GrammarAST newBlock = new BlockAST(adaptor.createToken(ANTLRParser.BLOCK, "BLOCK"));
GrammarAST newAlt = new AltAST(adaptor.createToken(ANTLRParser.ALT, "ALT"));
adaptor.addChild(newBlock, newAlt);
while (previous.getChildCount() > 1) {
adaptor.addChild(newAlt, previous.deleteChild(1));
}
if (newAlt.getChildCount() == 0) {
adaptor.addChild(newAlt, adaptor.create(ANTLRParser.EPSILON, "EPSILON"));
}
adaptor.addChild(previous, newBlock);
}
// normalize the current alt to the same shape
if (translatedAlt.getChildCount() == 1 || translatedAlt.getChild(1).getType() != ANTLRParser.BLOCK) {
GrammarAST newBlock = new BlockAST(adaptor.createToken(ANTLRParser.BLOCK, "BLOCK"));
GrammarAST newAlt = new AltAST(adaptor.createToken(ANTLRParser.ALT, "ALT"));
adaptor.addChild(newBlock, newAlt);
while (translatedAlt.getChildCount() > 1) {
adaptor.addChild(newAlt, translatedAlt.deleteChild(1));
}
if (newAlt.getChildCount() == 0) {
adaptor.addChild(newAlt, adaptor.create(ANTLRParser.EPSILON, "EPSILON"));
}
adaptor.addChild(translatedAlt, newBlock);
}
// fold the current alt's inner alternative into the previous alt's block
GrammarAST combinedBlock = (GrammarAST) previous.getChild(1);
adaptor.addChild(combinedBlock, translatedAlt.getChild(1).getChild(0));
if (LOGGER.isLoggable(Level.FINE)) {
LOGGER.log(Level.FINE, previous.toStringTree());
}
} else {
// not combinable: emit the factored alt as-is
GrammarAST translatedAlt = factoredAlternatives[i];
if (translatedAlt.getChildCount() == 0) {
adaptor.addChild(translatedAlt, adaptor.create(ANTLRParser.EPSILON, "EPSILON"));
}
adaptor.addChild(newChildren, translatedAlt);
}
}
if (mode.includeUnfactoredAlts() && unfactoredIntervals.contains(i)) {
GrammarAST translatedAlt = unfactoredAlternatives[i];
if (translatedAlt.getChildCount() == 0) {
adaptor.addChild(translatedAlt, adaptor.create(ANTLRParser.EPSILON, "EPSILON"));
}
adaptor.addChild(newChildren, translatedAlt);
}
}
// swap the block's old alternatives for the rebuilt list
adaptor.replaceChildren(block, 0, block.getChildCount() - 1, newChildren);
// when rewriting a rule's own (non-variant) decision, rebuild the Rule's
// 1-based alternative array to match the new alternative count
if (!variant && block.getParent() instanceof RuleAST) {
RuleAST ruleAST = (RuleAST) block.getParent();
String ruleName = ruleAST.getChild(0).getText();
Rule r = _rules.get(ruleName);
List<GrammarAST> blockAlts = block.getAllChildrenWithType(ANTLRParser.ALT);
r.numberOfAlts = blockAlts.size();
r.alt = new Alternative[blockAlts.size() + 1];
for (int i = 0; i < blockAlts.size(); i++) {
r.alt[i + 1] = new Alternative(r, i + 1);
r.alt[i + 1].ast = (AltAST) blockAlts.get(i);
}
}
return true;
}
Aggregations