Use of org.antlr.v4.runtime.atn.ATN in project antlr4 by antlr.
The class TestPerformance, method compileJdk.
@Test
@org.junit.Ignore
public void compileJdk() throws IOException, InterruptedException, ExecutionException {
    String jdkSourceRoot = getSourceRoot("JDK");
    assertTrue("The JDK_SOURCE_ROOT environment variable must be set for performance testing.", jdkSourceRoot != null && !jdkSourceRoot.isEmpty());
    compileJavaParser(USE_LR_GRAMMAR);
    final String lexerName = USE_LR_GRAMMAR ? "JavaLRLexer" : "JavaLexer";
    final String parserName = USE_LR_GRAMMAR ? "JavaLRParser" : "JavaParser";
    final String listenerName = USE_LR_GRAMMAR ? "JavaLRBaseListener" : "JavaBaseListener";
    final String entryPoint = "compilationUnit";
    final ParserFactory factory = getParserFactory(lexerName, parserName, listenerName, entryPoint);
    if (!TOP_PACKAGE.isEmpty()) {
        jdkSourceRoot = jdkSourceRoot + '/' + TOP_PACKAGE.replace('.', '/');
    }
    File directory = new File(jdkSourceRoot);
    assertTrue(directory.isDirectory());
    FilenameFilter filesFilter = FilenameFilters.extension(".java", false);
    FilenameFilter directoriesFilter = FilenameFilters.ALL_FILES;
    final List<InputDescriptor> sources = loadSources(directory, filesFilter, directoriesFilter, RECURSIVE);
    // Pre-allocate the per-pass statistics arrays before any parsing starts.
    for (int i = 0; i < PASSES; i++) {
        if (COMPUTE_TRANSITION_STATS) {
            totalTransitionsPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)];
            computedTransitionsPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)];
            if (DETAILED_DFA_STATE_STATS) {
                decisionInvocationsPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
                fullContextFallbackPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
                nonSllPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
                totalTransitionsPerDecisionPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
                computedTransitionsPerDecisionPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
                fullContextTransitionsPerDecisionPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
            }
        }
        if (COMPUTE_TIMING_STATS) {
            timePerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)];
            tokensPerFile[i] = new int[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)];
        }
    }
    System.out.format("Located %d source files.%n", sources.size());
    System.out.print(getOptionsDescription(TOP_PACKAGE));
    ExecutorService executorService = Executors.newFixedThreadPool(FILE_GRANULARITY ? 1 : NUMBER_OF_THREADS, new NumberedThreadFactory());
    List<Future<?>> passResults = new ArrayList<Future<?>>();
    // The first pass warms up the shared lexers/parsers and their DFA caches.
    passResults.add(executorService.submit(new Runnable() {
        @Override
        public void run() {
            try {
                parse1(0, factory, sources, SHUFFLE_FILES_AT_START);
            } catch (InterruptedException ex) {
                Logger.getLogger(TestPerformance.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
    }));
    // The remaining passes optionally reset the cached DFAs before parsing again.
    for (int i = 0; i < PASSES - 1; i++) {
        final int currentPass = i + 1;
        passResults.add(executorService.submit(new Runnable() {
            @Override
            public void run() {
                if (CLEAR_DFA) {
                    int index = FILE_GRANULARITY ? 0 : ((NumberedThread) Thread.currentThread()).getThreadNumber();
                    if (sharedLexers.length > 0 && sharedLexers[index] != null) {
                        ATN atn = sharedLexers[index].getATN();
                        for (int j = 0; j < sharedLexers[index].getInterpreter().decisionToDFA.length; j++) {
                            sharedLexers[index].getInterpreter().decisionToDFA[j] = new DFA(atn.getDecisionState(j), j);
                        }
                    }
                    if (sharedParsers.length > 0 && sharedParsers[index] != null) {
                        ATN atn = sharedParsers[index].getATN();
                        for (int j = 0; j < sharedParsers[index].getInterpreter().decisionToDFA.length; j++) {
                            sharedParsers[index].getInterpreter().decisionToDFA[j] = new DFA(atn.getDecisionState(j), j);
                        }
                    }
                    if (FILE_GRANULARITY) {
                        Arrays.fill(sharedLexers, null);
                        Arrays.fill(sharedParsers, null);
                    }
                }
                try {
                    parse2(currentPass, factory, sources, SHUFFLE_FILES_AFTER_ITERATIONS);
                } catch (InterruptedException ex) {
                    Logger.getLogger(TestPerformance.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }));
    }
    // Wait for every pass to finish before computing statistics.
    for (Future<?> passResult : passResults) {
        passResult.get();
    }
    executorService.shutdown();
    executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    if (COMPUTE_TRANSITION_STATS && SHOW_TRANSITION_STATS_PER_FILE) {
        computeTransitionStatistics();
    }
    if (COMPUTE_TIMING_STATS) {
        computeTimingStatistics();
    }
    sources.clear();
    if (PAUSE_FOR_HEAP_DUMP) {
        System.gc();
        System.out.println("Pausing before application exit.");
        try {
            Thread.sleep(4000);
        } catch (InterruptedException ex) {
            Logger.getLogger(TestPerformance.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}
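The part of this test that touches the ATN directly is the per-decision DFA reset inside the pass Runnables. Below is a minimal sketch of that pattern in isolation, assuming an already constructed Parser; the clearCachedDFA name and the parser argument are illustrative, not part of the test:

import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.dfa.DFA;

static void clearCachedDFA(Parser parser) {
    ATN atn = parser.getATN();
    DFA[] decisionToDFA = parser.getInterpreter().decisionToDFA;
    for (int d = 0; d < decisionToDFA.length; d++) {
        // Replace each cached DFA with an empty one rooted at its decision state,
        // so the next pass measures adaptive prediction with a cold cache again.
        decisionToDFA[d] = new DFA(atn.getDecisionState(d), d);
    }
}

The lexer side is handled the same way above, with the LexerATNSimulator supplying its own decisionToDFA array.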
Use of org.antlr.v4.runtime.atn.ATN in project antlr4 by antlr.
The class BasePythonTest, method getTokenTypes.
public List<String> getTokenTypes(LexerGrammar lg, ATN atn, CharStream input) {
    LexerATNSimulator interp = new LexerATNSimulator(atn, new DFA[] { new DFA(atn.modeToStartState.get(Lexer.DEFAULT_MODE)) }, null);
    List<String> tokenTypes = new ArrayList<String>();
    int ttype;
    boolean hitEOF = false;
    do {
        if (hitEOF) {
            tokenTypes.add("EOF");
            break;
        }
        int t = input.LA(1);
        ttype = interp.match(input, Lexer.DEFAULT_MODE);
        if (ttype == Token.EOF) {
            tokenTypes.add("EOF");
        } else {
            tokenTypes.add(lg.typeToTokenList.get(ttype));
        }
        // If this match started at EOF, emit a final EOF marker on the next
        // iteration instead of matching again.
        if (t == IntStream.EOF) {
            hitEOF = true;
        }
    } while (ttype != Token.EOF);
    return tokenTypes;
}
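A hypothetical usage of getTokenTypes against a tiny in-memory lexer grammar; the grammar text, token names, and input are invented for illustration, and createATN is assumed to be the base-class helper shown further down this page:

public void tokenTypesExample() throws Exception {
    LexerGrammar lg = new LexerGrammar(
        "lexer grammar L;\n" +
        "A : 'a' ;\n" +
        "B : 'b' ;\n");
    ATN atn = createATN(lg, true);                    // assumed helper; see createATN below
    CharStream input = CharStreams.fromString("ab");  // CharStreams requires ANTLR >= 4.7
    List<String> types = getTokenTypes(lg, atn, input);
    // Expected result of the loop above: [A, B, EOF]
}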
Use of org.antlr.v4.runtime.atn.ATN in project antlr4 by antlr.
The class BasePythonTest, method checkRuleATN.
void checkRuleATN(Grammar g, String ruleName, String expecting) {
    ParserATNFactory f = new ParserATNFactory(g);
    ATN atn = f.createATN();
    DOTGenerator dot = new DOTGenerator(g);
    System.out.println(dot.getDOT(atn.ruleToStartState[g.getRule(ruleName).index]));
    Rule r = g.getRule(ruleName);
    ATNState startState = atn.ruleToStartState[r.index];
    ATNPrinter serializer = new ATNPrinter(g, startState);
    String result = serializer.asString();
    //System.out.print(result);
    assertEquals(expecting, result);
}
Use of org.antlr.v4.runtime.atn.ATN in project antlr4 by antlr.
The class BaseBrowserTest, method createATN.
protected ATN createATN(Grammar g, boolean useSerializer) {
    if (g.atn == null) {
        semanticProcess(g);
        assertEquals(0, g.tool.getNumErrors());
        ParserATNFactory f;
        if (g.isLexer()) {
            f = new LexerATNFactory((LexerGrammar) g);
        } else {
            f = new ParserATNFactory(g);
        }
        g.atn = f.createATN();
        assertEquals(0, g.tool.getNumErrors());
    }
    ATN atn = g.atn;
    if (useSerializer) {
        char[] serialized = ATNSerializer.getSerializedAsChars(atn);
        return new ATNDeserializer().deserialize(serialized);
    }
    return atn;
}
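A hypothetical usage of createATN that exercises the serializer round trip enabled by the useSerializer flag; the grammar text and method name are invented for illustration:

public void atnRoundTripExample() throws Exception {
    Grammar g = new Grammar(
        "grammar T;\n" +
        "s : 'a' s | 'b' ;\n");
    ATN roundTripped = createATN(g, true);  // built, serialized via ATNSerializer, then deserialized
    ATN original = g.atn;                   // createATN caches the freshly built ATN on the grammar
    // The deserialized ATN should still have one start state per rule.
    assertEquals(original.ruleToStartState.length, roundTripped.ruleToStartState.length);
}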
Use of org.antlr.v4.runtime.atn.ATN in project antlr4 by antlr.
The class TestExpectedTokens, method testFollowIncludedInLeftRecursiveRule.
// Test for https://github.com/antlr/antlr4/issues/1480
// can't reproduce
@Test
public void testFollowIncludedInLeftRecursiveRule() throws Exception {
    String gtext =
        "grammar T;\n" +
        "s : expr EOF ;\n" +
        "expr : L expr R\n" +
        " | expr PLUS expr\n" +
        " | ID\n" +
        " ;\n";
    Grammar g = new Grammar(gtext);
    String atnText =
        "RuleStart_expr_2->BlockStart_13\n" +
        "BlockStart_13->s7\n" +
        "BlockStart_13->s12\n" +
        "s7-action_1:-1->s8\n" +
        "s12-ID->BlockEnd_14\n" +
        "s8-L->s9\n" +
        "BlockEnd_14->StarLoopEntry_20\n" +
        "s9-expr->RuleStart_expr_2\n" +
        "StarLoopEntry_20->StarBlockStart_18\n" +
        "StarLoopEntry_20->s21\n" +
        "s10-R->s11\n" +
        "StarBlockStart_18->s15\n" +
        "s21->RuleStop_expr_3\n" +
        "s11->BlockEnd_14\n" +
        "s15-2 >= _p->s16\n" +
        "RuleStop_expr_3->s5\n" +
        "RuleStop_expr_3->s10\n" +
        "RuleStop_expr_3->BlockEnd_19\n" +
        "s16-PLUS->s17\n" +
        "s17-expr->RuleStart_expr_2\n" +
        "BlockEnd_19->StarLoopBack_22\n" +
        "StarLoopBack_22->StarLoopEntry_20\n";
    checkRuleATN(g, "expr", atnText);
    ATN atn = g.getATN();
    // DOTGenerator gen = new DOTGenerator(g);
    // String dot = gen.getDOT(atn.states.get(2), g.getRuleNames(), false);
    // System.out.println(dot);
    // Simulate call stack after input '(x' from rule s
    ParserRuleContext callStackFrom_s = new ParserRuleContext(null, 4);
    ParserRuleContext callStackFrom_expr = new ParserRuleContext(callStackFrom_s, 9);
    int afterID = 14;
    IntervalSet tokens = atn.getExpectedTokens(afterID, callStackFrom_expr);
    assertEquals("{R, PLUS}", tokens.toString(g.getTokenNames()));
    // Simulate call stack after input '(x' from within rule expr
    callStackFrom_expr = new ParserRuleContext(null, 9);
    tokens = atn.getExpectedTokens(afterID, callStackFrom_expr);
    assertEquals("{R, PLUS}", tokens.toString(g.getTokenNames()));
}
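The same ATN.getExpectedTokens lookup drives the runtime's own error reporting. A minimal sketch of the equivalent call during a live parse, where parser stands for any already constructed Parser (hypothetical here):

// Current ATN state plus current rule context give the token types that may appear next.
IntervalSet expected = parser.getATN().getExpectedTokens(parser.getState(), parser.getContext());
System.out.println(expected.toString(parser.getVocabulary()));
// Parser.getExpectedTokens() wraps this same call for convenience.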