Use of org.antlr.v4.runtime.dfa.DFA in project antlr4 by antlr.
The class TestPerformance, method getParserFactory.
protected ParserFactory getParserFactory(String lexerName, String parserName, String listenerName, final String entryPoint) {
    try {
        ClassLoader loader = new URLClassLoader(new URL[] { getTempTestDir().toURI().toURL() }, ClassLoader.getSystemClassLoader());
        final Class<? extends Lexer> lexerClass = loader.loadClass(lexerName).asSubclass(Lexer.class);
        final Class<? extends Parser> parserClass = loader.loadClass(parserName).asSubclass(Parser.class);
        final Class<? extends ParseTreeListener> listenerClass = loader.loadClass(listenerName).asSubclass(ParseTreeListener.class);
        final Constructor<? extends Lexer> lexerCtor = lexerClass.getConstructor(CharStream.class);
        final Constructor<? extends Parser> parserCtor = parserClass.getConstructor(TokenStream.class);
        // construct initial instances of the lexer and parser to deserialize their ATNs
        TokenSource tokenSource = lexerCtor.newInstance(new ANTLRInputStream(""));
        parserCtor.newInstance(new CommonTokenStream(tokenSource));
        return new ParserFactory() {
            @Override
            public FileParseResult parseFile(CharStream input, int currentPass, int thread) {
                final MurmurHashChecksum checksum = new MurmurHashChecksum();
                final long startTime = System.nanoTime();
                assert thread >= 0 && thread < NUMBER_OF_THREADS;
                try {
                    ParseTreeListener listener = sharedListeners[thread];
                    if (listener == null) {
                        listener = listenerClass.newInstance();
                        sharedListeners[thread] = listener;
                    }
                    Lexer lexer = sharedLexers[thread];
                    if (REUSE_LEXER && lexer != null) {
                        lexer.setInputStream(input);
                    } else {
                        Lexer previousLexer = lexer;
                        lexer = lexerCtor.newInstance(input);
                        DFA[] decisionToDFA = (FILE_GRANULARITY || previousLexer == null ? lexer : previousLexer).getInterpreter().decisionToDFA;
                        if (!REUSE_LEXER_DFA || (!FILE_GRANULARITY && previousLexer == null)) {
                            decisionToDFA = new DFA[decisionToDFA.length];
                        }
                        if (COMPUTE_TRANSITION_STATS) {
                            lexer.setInterpreter(new StatisticsLexerATNSimulator(lexer, lexer.getATN(), decisionToDFA, lexer.getInterpreter().getSharedContextCache()));
                        } else if (!REUSE_LEXER_DFA) {
                            lexer.setInterpreter(new LexerATNSimulator(lexer, lexer.getATN(), decisionToDFA, lexer.getInterpreter().getSharedContextCache()));
                        }
                        sharedLexers[thread] = lexer;
                    }
                    lexer.removeErrorListeners();
                    lexer.addErrorListener(DescriptiveErrorListener.INSTANCE);
                    if (lexer.getInterpreter().decisionToDFA[0] == null) {
                        ATN atn = lexer.getATN();
                        for (int i = 0; i < lexer.getInterpreter().decisionToDFA.length; i++) {
                            lexer.getInterpreter().decisionToDFA[i] = new DFA(atn.getDecisionState(i), i);
                        }
                    }
                    CommonTokenStream tokens = new CommonTokenStream(lexer);
                    tokens.fill();
                    tokenCount.addAndGet(currentPass, tokens.size());
                    if (COMPUTE_CHECKSUM) {
                        for (Token token : tokens.getTokens()) {
                            updateChecksum(checksum, token);
                        }
                    }
                    if (!RUN_PARSER) {
                        return new FileParseResult(input.getSourceName(), (int) checksum.getValue(), null, tokens.size(), startTime, lexer, null);
                    }
                    final long parseStartTime = System.nanoTime();
                    Parser parser = sharedParsers[thread];
                    if (REUSE_PARSER && parser != null) {
                        parser.setInputStream(tokens);
                    } else {
                        Parser previousParser = parser;
                        if (USE_PARSER_INTERPRETER) {
                            Parser referenceParser = parserCtor.newInstance(tokens);
                            parser = new ParserInterpreter(referenceParser.getGrammarFileName(), referenceParser.getVocabulary(), Arrays.asList(referenceParser.getRuleNames()), referenceParser.getATN(), tokens);
                        } else {
                            parser = parserCtor.newInstance(tokens);
                        }
                        DFA[] decisionToDFA = (FILE_GRANULARITY || previousParser == null ? parser : previousParser).getInterpreter().decisionToDFA;
                        if (!REUSE_PARSER_DFA || (!FILE_GRANULARITY && previousParser == null)) {
                            decisionToDFA = new DFA[decisionToDFA.length];
                        }
                        if (COMPUTE_TRANSITION_STATS) {
                            parser.setInterpreter(new StatisticsParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
                        } else if (!REUSE_PARSER_DFA) {
                            parser.setInterpreter(new ParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
                        }
                        sharedParsers[thread] = parser;
                    }
                    parser.removeParseListeners();
                    parser.removeErrorListeners();
                    if (!TWO_STAGE_PARSING) {
                        parser.addErrorListener(DescriptiveErrorListener.INSTANCE);
                        parser.addErrorListener(new SummarizingDiagnosticErrorListener());
                    }
                    if (parser.getInterpreter().decisionToDFA[0] == null) {
                        ATN atn = parser.getATN();
                        for (int i = 0; i < parser.getInterpreter().decisionToDFA.length; i++) {
                            parser.getInterpreter().decisionToDFA[i] = new DFA(atn.getDecisionState(i), i);
                        }
                    }
                    parser.getInterpreter().setPredictionMode(TWO_STAGE_PARSING ? PredictionMode.SLL : PREDICTION_MODE);
                    parser.setBuildParseTree(BUILD_PARSE_TREES);
                    if (!BUILD_PARSE_TREES && BLANK_LISTENER) {
                        parser.addParseListener(listener);
                    }
                    if (BAIL_ON_ERROR || TWO_STAGE_PARSING) {
                        parser.setErrorHandler(new BailErrorStrategy());
                    }
                    Method parseMethod = parserClass.getMethod(entryPoint);
                    Object parseResult;
                    try {
                        if (COMPUTE_CHECKSUM && !BUILD_PARSE_TREES) {
                            parser.addParseListener(new ChecksumParseTreeListener(checksum));
                        }
                        if (USE_PARSER_INTERPRETER) {
                            ParserInterpreter parserInterpreter = (ParserInterpreter) parser;
                            parseResult = parserInterpreter.parse(Collections.lastIndexOfSubList(Arrays.asList(parser.getRuleNames()), Collections.singletonList(entryPoint)));
                        } else {
                            parseResult = parseMethod.invoke(parser);
                        }
                    } catch (InvocationTargetException ex) {
                        if (!TWO_STAGE_PARSING) {
                            throw ex;
                        }
                        String sourceName = tokens.getSourceName();
                        sourceName = sourceName != null && !sourceName.isEmpty() ? sourceName + ": " : "";
                        if (REPORT_SECOND_STAGE_RETRY) {
                            System.err.println(sourceName + "Forced to retry with full context.");
                        }
                        if (!(ex.getCause() instanceof ParseCancellationException)) {
                            throw ex;
                        }
                        tokens.seek(0);
                        if (REUSE_PARSER && parser != null) {
                            parser.setInputStream(tokens);
                        } else {
                            Parser previousParser = parser;
                            if (USE_PARSER_INTERPRETER) {
                                Parser referenceParser = parserCtor.newInstance(tokens);
                                parser = new ParserInterpreter(referenceParser.getGrammarFileName(), referenceParser.getVocabulary(), Arrays.asList(referenceParser.getRuleNames()), referenceParser.getATN(), tokens);
                            } else {
                                parser = parserCtor.newInstance(tokens);
                            }
                            DFA[] decisionToDFA = previousParser.getInterpreter().decisionToDFA;
                            if (COMPUTE_TRANSITION_STATS) {
                                parser.setInterpreter(new StatisticsParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
                            } else if (!REUSE_PARSER_DFA) {
                                parser.setInterpreter(new ParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
                            }
                            sharedParsers[thread] = parser;
                        }
                        parser.removeParseListeners();
                        parser.removeErrorListeners();
                        parser.addErrorListener(DescriptiveErrorListener.INSTANCE);
                        parser.addErrorListener(new SummarizingDiagnosticErrorListener());
                        parser.getInterpreter().setPredictionMode(PredictionMode.LL);
                        parser.setBuildParseTree(BUILD_PARSE_TREES);
                        if (COMPUTE_CHECKSUM && !BUILD_PARSE_TREES) {
                            parser.addParseListener(new ChecksumParseTreeListener(checksum));
                        }
                        if (!BUILD_PARSE_TREES && BLANK_LISTENER) {
                            parser.addParseListener(listener);
                        }
                        if (BAIL_ON_ERROR) {
                            parser.setErrorHandler(new BailErrorStrategy());
                        }
                        parseResult = parseMethod.invoke(parser);
                    }
                    assertThat(parseResult, instanceOf(ParseTree.class));
                    if (COMPUTE_CHECKSUM && BUILD_PARSE_TREES) {
                        ParseTreeWalker.DEFAULT.walk(new ChecksumParseTreeListener(checksum), (ParseTree) parseResult);
                    }
                    if (BUILD_PARSE_TREES && BLANK_LISTENER) {
                        ParseTreeWalker.DEFAULT.walk(listener, (ParseTree) parseResult);
                    }
                    return new FileParseResult(input.getSourceName(), (int) checksum.getValue(), (ParseTree) parseResult, tokens.size(), TIME_PARSE_ONLY ? parseStartTime : startTime, lexer, parser);
                } catch (Exception e) {
                    if (!REPORT_SYNTAX_ERRORS && e instanceof ParseCancellationException) {
                        return new FileParseResult("unknown", (int) checksum.getValue(), null, 0, startTime, null, null);
                    }
                    e.printStackTrace(System.out);
                    throw new IllegalStateException(e);
                }
            }
        };
    } catch (Exception e) {
        e.printStackTrace(System.out);
        Assert.fail(e.getMessage());
        throw new IllegalStateException(e);
    }
}
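A minimal sketch of driving the returned factory directly, assuming the Java grammar names used by the compileJdk test below; "Example.java" and the direct call are illustrative, not part of the test:

ParserFactory factory = getParserFactory("JavaLexer", "JavaParser", "JavaBaseListener", "compilationUnit");
// ANTLRFileStream matches the vintage runtime API used above; the file name is a placeholder.
CharStream input = new ANTLRFileStream("Example.java");
// Parse the file as pass 0 on thread 0 (thread must be < NUMBER_OF_THREADS).
FileParseResult result = factory.parseFile(input, 0, 0);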
Use of org.antlr.v4.runtime.dfa.DFA in project antlr4 by antlr.
The class TestPerformance, method compileJdk.
@Test
@org.junit.Ignore
public void compileJdk() throws IOException, InterruptedException, ExecutionException {
    String jdkSourceRoot = getSourceRoot("JDK");
    assertTrue("The JDK_SOURCE_ROOT environment variable must be set for performance testing.", jdkSourceRoot != null && !jdkSourceRoot.isEmpty());
    compileJavaParser(USE_LR_GRAMMAR);
    final String lexerName = USE_LR_GRAMMAR ? "JavaLRLexer" : "JavaLexer";
    final String parserName = USE_LR_GRAMMAR ? "JavaLRParser" : "JavaParser";
    final String listenerName = USE_LR_GRAMMAR ? "JavaLRBaseListener" : "JavaBaseListener";
    final String entryPoint = "compilationUnit";
    final ParserFactory factory = getParserFactory(lexerName, parserName, listenerName, entryPoint);
    if (!TOP_PACKAGE.isEmpty()) {
        jdkSourceRoot = jdkSourceRoot + '/' + TOP_PACKAGE.replace('.', '/');
    }
    File directory = new File(jdkSourceRoot);
    assertTrue(directory.isDirectory());
    FilenameFilter filesFilter = FilenameFilters.extension(".java", false);
    FilenameFilter directoriesFilter = FilenameFilters.ALL_FILES;
    final List<InputDescriptor> sources = loadSources(directory, filesFilter, directoriesFilter, RECURSIVE);
    for (int i = 0; i < PASSES; i++) {
        if (COMPUTE_TRANSITION_STATS) {
            totalTransitionsPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)];
            computedTransitionsPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)];
            if (DETAILED_DFA_STATE_STATS) {
                decisionInvocationsPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
                fullContextFallbackPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
                nonSllPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
                totalTransitionsPerDecisionPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
                computedTransitionsPerDecisionPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
                fullContextTransitionsPerDecisionPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
            }
        }
        if (COMPUTE_TIMING_STATS) {
            timePerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)];
            tokensPerFile[i] = new int[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)];
        }
    }
    System.out.format("Located %d source files.%n", sources.size());
    System.out.print(getOptionsDescription(TOP_PACKAGE));
    ExecutorService executorService = Executors.newFixedThreadPool(FILE_GRANULARITY ? 1 : NUMBER_OF_THREADS, new NumberedThreadFactory());
    List<Future<?>> passResults = new ArrayList<Future<?>>();
    passResults.add(executorService.submit(new Runnable() {
        @Override
        public void run() {
            try {
                parse1(0, factory, sources, SHUFFLE_FILES_AT_START);
            } catch (InterruptedException ex) {
                Logger.getLogger(TestPerformance.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
    }));
    for (int i = 0; i < PASSES - 1; i++) {
        final int currentPass = i + 1;
        passResults.add(executorService.submit(new Runnable() {
            @Override
            public void run() {
                if (CLEAR_DFA) {
                    int index = FILE_GRANULARITY ? 0 : ((NumberedThread) Thread.currentThread()).getThreadNumber();
                    if (sharedLexers.length > 0 && sharedLexers[index] != null) {
                        ATN atn = sharedLexers[index].getATN();
                        for (int j = 0; j < sharedLexers[index].getInterpreter().decisionToDFA.length; j++) {
                            sharedLexers[index].getInterpreter().decisionToDFA[j] = new DFA(atn.getDecisionState(j), j);
                        }
                    }
                    if (sharedParsers.length > 0 && sharedParsers[index] != null) {
                        ATN atn = sharedParsers[index].getATN();
                        for (int j = 0; j < sharedParsers[index].getInterpreter().decisionToDFA.length; j++) {
                            sharedParsers[index].getInterpreter().decisionToDFA[j] = new DFA(atn.getDecisionState(j), j);
                        }
                    }
                    if (FILE_GRANULARITY) {
                        Arrays.fill(sharedLexers, null);
                        Arrays.fill(sharedParsers, null);
                    }
                }
                try {
                    parse2(currentPass, factory, sources, SHUFFLE_FILES_AFTER_ITERATIONS);
                } catch (InterruptedException ex) {
                    Logger.getLogger(TestPerformance.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }));
    }
    for (Future<?> passResult : passResults) {
        passResult.get();
    }
    executorService.shutdown();
    executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    if (COMPUTE_TRANSITION_STATS && SHOW_TRANSITION_STATS_PER_FILE) {
        computeTransitionStatistics();
    }
    if (COMPUTE_TIMING_STATS) {
        computeTimingStatistics();
    }
    sources.clear();
    if (PAUSE_FOR_HEAP_DUMP) {
        System.gc();
        System.out.println("Pausing before application exit.");
        try {
            Thread.sleep(4000);
        } catch (InterruptedException ex) {
            Logger.getLogger(TestPerformance.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}
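Both the lazy DFA initialization in getParserFactory and the CLEAR_DFA branch above reduce to the same idiom: replace each decision's DFA with a fresh, empty one rooted at its decision state. A sketch of that idiom as a standalone helper (clearDecisionDFAs is hypothetical, not a method of this class):

static void clearDecisionDFAs(ATN atn, DFA[] decisionToDFA) {
    for (int d = 0; d < decisionToDFA.length; d++) {
        // A new DFA keyed to decision d discards all states cached by earlier prediction work.
        decisionToDFA[d] = new DFA(atn.getDecisionState(d), d);
    }
}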
Use of org.antlr.v4.runtime.dfa.DFA in project antlr4 by antlr.
The class TestPerformance, method parseSources.
protected void parseSources(final int currentPass, final ParserFactory factory, Collection<InputDescriptor> sources, boolean shuffleSources) throws InterruptedException {
    if (shuffleSources) {
        List<InputDescriptor> sourcesList = new ArrayList<InputDescriptor>(sources);
        synchronized (RANDOM) {
            Collections.shuffle(sourcesList, RANDOM);
        }
        sources = sourcesList;
    }
    long startTime = System.nanoTime();
    tokenCount.set(currentPass, 0);
    int inputSize = 0;
    int inputCount = 0;
    Collection<Future<FileParseResult>> results = new ArrayList<Future<FileParseResult>>();
    ExecutorService executorService;
    if (FILE_GRANULARITY) {
        executorService = Executors.newFixedThreadPool(FILE_GRANULARITY ? NUMBER_OF_THREADS : 1, new NumberedThreadFactory());
    } else {
        executorService = Executors.newSingleThreadExecutor(new FixedThreadNumberFactory(((NumberedThread) Thread.currentThread()).getThreadNumber()));
    }
    for (InputDescriptor inputDescriptor : sources) {
        if (inputCount >= MAX_FILES_PER_PARSE_ITERATION) {
            break;
        }
        final CharStream input = inputDescriptor.getInputStream();
        input.seek(0);
        inputSize += input.size();
        inputCount++;
        Future<FileParseResult> futureChecksum = executorService.submit(new Callable<FileParseResult>() {
            @Override
            public FileParseResult call() {
                // System.out.format("Parsing file %s\n", input.getSourceName());
                try {
                    return factory.parseFile(input, currentPass, ((NumberedThread) Thread.currentThread()).getThreadNumber());
                } catch (IllegalStateException ex) {
                    ex.printStackTrace(System.err);
                } catch (Throwable t) {
                    t.printStackTrace(System.err);
                }
                return null;
            }
        });
        results.add(futureChecksum);
    }
    MurmurHashChecksum checksum = new MurmurHashChecksum();
    int currentIndex = -1;
    for (Future<FileParseResult> future : results) {
        currentIndex++;
        int fileChecksum = 0;
        try {
            FileParseResult fileResult = future.get();
            if (COMPUTE_TRANSITION_STATS) {
                totalTransitionsPerFile[currentPass][currentIndex] = sum(fileResult.parserTotalTransitions);
                computedTransitionsPerFile[currentPass][currentIndex] = sum(fileResult.parserComputedTransitions);
                if (DETAILED_DFA_STATE_STATS) {
                    decisionInvocationsPerFile[currentPass][currentIndex] = fileResult.decisionInvocations;
                    fullContextFallbackPerFile[currentPass][currentIndex] = fileResult.fullContextFallback;
                    nonSllPerFile[currentPass][currentIndex] = fileResult.nonSll;
                    totalTransitionsPerDecisionPerFile[currentPass][currentIndex] = fileResult.parserTotalTransitions;
                    computedTransitionsPerDecisionPerFile[currentPass][currentIndex] = fileResult.parserComputedTransitions;
                    fullContextTransitionsPerDecisionPerFile[currentPass][currentIndex] = fileResult.parserFullContextTransitions;
                }
            }
            if (COMPUTE_TIMING_STATS) {
                timePerFile[currentPass][currentIndex] = fileResult.endTime - fileResult.startTime;
                tokensPerFile[currentPass][currentIndex] = fileResult.tokenCount;
            }
            fileChecksum = fileResult.checksum;
        } catch (ExecutionException ex) {
            Logger.getLogger(TestPerformance.class.getName()).log(Level.SEVERE, null, ex);
        }
        if (COMPUTE_CHECKSUM) {
            updateChecksum(checksum, fileChecksum);
        }
    }
    executorService.shutdown();
    executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    System.out.format("%d. Total parse time for %d files (%d KB, %d tokens%s): %.0fms%n", currentPass + 1, inputCount, inputSize / 1024, tokenCount.get(currentPass), COMPUTE_CHECKSUM ? String.format(", checksum 0x%8X", checksum.getValue()) : "", (double) (System.nanoTime() - startTime) / 1000000.0);
    if (sharedLexers.length > 0) {
        int index = FILE_GRANULARITY ? 0 : ((NumberedThread) Thread.currentThread()).getThreadNumber();
        Lexer lexer = sharedLexers[index];
        final LexerATNSimulator lexerInterpreter = lexer.getInterpreter();
        final DFA[] modeToDFA = lexerInterpreter.decisionToDFA;
        if (SHOW_DFA_STATE_STATS) {
            int states = 0;
            int configs = 0;
            Set<ATNConfig> uniqueConfigs = new HashSet<ATNConfig>();
            for (int i = 0; i < modeToDFA.length; i++) {
                DFA dfa = modeToDFA[i];
                if (dfa == null) {
                    continue;
                }
                states += dfa.states.size();
                for (DFAState state : dfa.states.values()) {
                    configs += state.configs.size();
                    uniqueConfigs.addAll(state.configs);
                }
            }
            System.out.format("There are %d lexer DFAState instances, %d configs (%d unique).%n", states, configs, uniqueConfigs.size());
            if (DETAILED_DFA_STATE_STATS) {
System.out.format("\tMode\tStates\tConfigs\tMode%n");
                for (int i = 0; i < modeToDFA.length; i++) {
                    DFA dfa = modeToDFA[i];
                    if (dfa == null || dfa.states.isEmpty()) {
                        continue;
                    }
                    int modeConfigs = 0;
                    for (DFAState state : dfa.states.values()) {
                        modeConfigs += state.configs.size();
                    }
                    String modeName = lexer.getModeNames()[i];
                    System.out.format("\t%d\t%d\t%d\t%s%n", dfa.decision, dfa.states.size(), modeConfigs, modeName);
                }
            }
        }
    }
    if (RUN_PARSER && sharedParsers.length > 0) {
        int index = FILE_GRANULARITY ? 0 : ((NumberedThread) Thread.currentThread()).getThreadNumber();
        Parser parser = sharedParsers[index];
        // make sure the individual DFAState objects actually have unique ATNConfig arrays
        final ParserATNSimulator interpreter = parser.getInterpreter();
        final DFA[] decisionToDFA = interpreter.decisionToDFA;
        if (SHOW_DFA_STATE_STATS) {
            int states = 0;
            int configs = 0;
            Set<ATNConfig> uniqueConfigs = new HashSet<ATNConfig>();
            for (int i = 0; i < decisionToDFA.length; i++) {
                DFA dfa = decisionToDFA[i];
                if (dfa == null) {
                    continue;
                }
                states += dfa.states.size();
                for (DFAState state : dfa.states.values()) {
                    configs += state.configs.size();
                    uniqueConfigs.addAll(state.configs);
                }
            }
            System.out.format("There are %d parser DFAState instances, %d configs (%d unique).%n", states, configs, uniqueConfigs.size());
            if (DETAILED_DFA_STATE_STATS) {
                if (COMPUTE_TRANSITION_STATS) {
                    System.out.format("\tDecision\tStates\tConfigs\tPredict (ALL)\tPredict (LL)\tNon-SLL\tTransitions\tTransitions (ATN)\tTransitions (LL)\tLA (SLL)\tLA (LL)\tRule%n");
                } else {
                    System.out.format("\tDecision\tStates\tConfigs\tRule%n");
                }
                for (int i = 0; i < decisionToDFA.length; i++) {
                    DFA dfa = decisionToDFA[i];
                    if (dfa == null || dfa.states.isEmpty()) {
                        continue;
                    }
                    int decisionConfigs = 0;
                    for (DFAState state : dfa.states.values()) {
                        decisionConfigs += state.configs.size();
                    }
                    String ruleName = parser.getRuleNames()[parser.getATN().decisionToState.get(dfa.decision).ruleIndex];
                    long calls = 0;
                    long fullContextCalls = 0;
                    long nonSllCalls = 0;
                    long transitions = 0;
                    long computedTransitions = 0;
                    long fullContextTransitions = 0;
                    double lookahead = 0;
                    double fullContextLookahead = 0;
                    String formatString;
                    if (COMPUTE_TRANSITION_STATS) {
                        for (long[] data : decisionInvocationsPerFile[currentPass]) {
                            calls += data[i];
                        }
                        for (long[] data : fullContextFallbackPerFile[currentPass]) {
                            fullContextCalls += data[i];
                        }
                        for (long[] data : nonSllPerFile[currentPass]) {
                            nonSllCalls += data[i];
                        }
                        for (long[] data : totalTransitionsPerDecisionPerFile[currentPass]) {
                            transitions += data[i];
                        }
                        for (long[] data : computedTransitionsPerDecisionPerFile[currentPass]) {
                            computedTransitions += data[i];
                        }
                        for (long[] data : fullContextTransitionsPerDecisionPerFile[currentPass]) {
                            fullContextTransitions += data[i];
                        }
                        if (calls > 0) {
                            lookahead = (double) (transitions - fullContextTransitions) / (double) calls;
                        }
                        if (fullContextCalls > 0) {
                            fullContextLookahead = (double) fullContextTransitions / (double) fullContextCalls;
                        }
                        formatString = "\t%1$d\t%2$d\t%3$d\t%4$d\t%5$d\t%6$d\t%7$d\t%8$d\t%9$d\t%10$f\t%11$f\t%12$s%n";
                    } else {
                        calls = 0;
                        formatString = "\t%1$d\t%2$d\t%3$d\t%12$s%n";
                    }
                    System.out.format(formatString, dfa.decision, dfa.states.size(), decisionConfigs, calls, fullContextCalls, nonSllCalls, transitions, computedTransitions, fullContextTransitions, lookahead, fullContextLookahead, ruleName);
                }
            }
        }
        int localDfaCount = 0;
        int globalDfaCount = 0;
        int localConfigCount = 0;
        int globalConfigCount = 0;
        int[] contextsInDFAState = new int[0];
        for (int i = 0; i < decisionToDFA.length; i++) {
            DFA dfa = decisionToDFA[i];
            if (dfa == null) {
                continue;
            }
            if (SHOW_CONFIG_STATS) {
                for (DFAState state : dfa.states.keySet()) {
                    if (state.configs.size() >= contextsInDFAState.length) {
                        contextsInDFAState = Arrays.copyOf(contextsInDFAState, state.configs.size() + 1);
                    }
                    if (state.isAcceptState) {
                        boolean hasGlobal = false;
                        for (ATNConfig config : state.configs) {
                            if (config.reachesIntoOuterContext > 0) {
                                globalConfigCount++;
                                hasGlobal = true;
                            } else {
                                localConfigCount++;
                            }
                        }
                        if (hasGlobal) {
                            globalDfaCount++;
                        } else {
                            localDfaCount++;
                        }
                    }
                    contextsInDFAState[state.configs.size()]++;
                }
            }
        }
        if (SHOW_CONFIG_STATS && currentPass == 0) {
            System.out.format(" DFA accept states: %d total, %d with only local context, %d with a global context%n", localDfaCount + globalDfaCount, localDfaCount, globalDfaCount);
            System.out.format(" Config stats: %d total, %d local, %d global%n", localConfigCount + globalConfigCount, localConfigCount, globalConfigCount);
            if (SHOW_DFA_STATE_STATS) {
                for (int i = 0; i < contextsInDFAState.length; i++) {
                    if (contextsInDFAState[i] != 0) {
                        System.out.format(" %d configs = %d%n", i, contextsInDFAState[i]);
                    }
                }
            }
        }
    }
    if (COMPUTE_TIMING_STATS) {
        System.out.format("File\tTokens\tTime%n");
        for (int i = 0; i < timePerFile[currentPass].length; i++) {
            System.out.format("%d\t%d\t%d%n", i + 1, tokensPerFile[currentPass][i], timePerFile[currentPass][i]);
        }
    }
}
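The updateChecksum helpers called above are not part of this listing. Judging from the call sites, a plausible shape (an assumption, not the verbatim helpers) folds each int into the running MurmurHash state, with the Token overload hashing the token's identifying fields:

private static void updateChecksum(MurmurHashChecksum checksum, int value) {
    checksum.update(value);
}

private static void updateChecksum(MurmurHashChecksum checksum, Token token) {
    if (token == null) {
        checksum.update(0);
        return;
    }
    // Fields assumed to identify the token within its stream.
    updateChecksum(checksum, token.getStartIndex());
    updateChecksum(checksum, token.getStopIndex());
    updateChecksum(checksum, token.getLine());
    updateChecksum(checksum, token.getCharPositionInLine());
    updateChecksum(checksum, token.getType());
    updateChecksum(checksum, token.getChannel());
}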
Use of org.antlr.v4.runtime.dfa.DFA in project antlr4 by antlr.
The class ParserATNSimulator, method computeTargetState.
/**
 * Compute a target state for an edge in the DFA, and attempt to add the
 * computed state and corresponding edge to the DFA.
 *
 * @param dfa The DFA
 * @param previousD The current DFA state
 * @param t The next input symbol
 *
 * @return The computed target DFA state for the given input symbol
 * {@code t}. If {@code t} does not lead to a valid DFA state, this method
 * returns {@link #ERROR}.
 */
protected DFAState computeTargetState(DFA dfa, DFAState previousD, int t) {
    ATNConfigSet reach = computeReachSet(previousD.configs, t, false);
    if (reach == null) {
        addDFAEdge(dfa, previousD, t, ERROR);
        return ERROR;
    }
    // create new target state; we'll add to DFA after it's complete
    DFAState D = new DFAState(reach);
    int predictedAlt = getUniqueAlt(reach);
    if (debug) {
        Collection<BitSet> altSubSets = PredictionMode.getConflictingAltSubsets(reach);
        System.out.println("SLL altSubSets=" + altSubSets + ", configs=" + reach + ", predict=" + predictedAlt + ", allSubsetsConflict=" + PredictionMode.allSubsetsConflict(altSubSets) + ", conflictingAlts=" + getConflictingAlts(reach));
    }
    if (predictedAlt != ATN.INVALID_ALT_NUMBER) {
        // NO CONFLICT, UNIQUELY PREDICTED ALT
        D.isAcceptState = true;
        D.configs.uniqueAlt = predictedAlt;
        D.prediction = predictedAlt;
    } else if (PredictionMode.hasSLLConflictTerminatingPrediction(mode, reach)) {
        // MORE THAN ONE VIABLE ALTERNATIVE
        D.configs.conflictingAlts = getConflictingAlts(reach);
        D.requiresFullContext = true;
        // in SLL-only mode, we will stop at this state and return the minimum alt
        D.isAcceptState = true;
        D.prediction = D.configs.conflictingAlts.nextSetBit(0);
    }
    if (D.isAcceptState && D.configs.hasSemanticContext) {
        predicateDFAState(D, atn.getDecisionState(dfa.decision));
        if (D.predicates != null) {
            D.prediction = ATN.INVALID_ALT_NUMBER;
        }
    }
    // all adds to dfa are done after we've created full D state
    D = addDFAEdge(dfa, previousD, t, D);
    return D;
}
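computeTargetState is only reached on a DFA cache miss. A simplified sketch of the surrounding SLL prediction step, paraphrasing execATN in this class with the full-context fallback elided:

DFAState D = getExistingTargetState(previousD, t); // reuse a cached edge if one exists
if (D == null) {
    D = computeTargetState(dfa, previousD, t); // compute, then cache via addDFAEdge
}
if (D == ERROR) {
    // t does not lead to a viable alternative from previousD
    throw noViableAlt(input, outerContext, previousD.configs, startIndex);
}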
Use of org.antlr.v4.runtime.dfa.DFA in project antlr4 by antlr.
The class ParserATNSimulator, method evalSemanticContext.
/**
 * Look through a list of predicate/alt pairs, returning alts for the
 * pairs that win. A {@code NONE} predicate indicates an alt containing an
 * unpredicated config which behaves as "always true." If !complete
 * then we stop at the first predicate that evaluates to true. This
 * includes pairs with null predicates.
 */
protected BitSet evalSemanticContext(DFAState.PredPrediction[] predPredictions, ParserRuleContext outerContext, boolean complete) {
    BitSet predictions = new BitSet();
    for (DFAState.PredPrediction pair : predPredictions) {
        if (pair.pred == SemanticContext.NONE) {
            predictions.set(pair.alt);
            if (!complete) {
                break;
            }
            continue;
        }
        boolean fullCtx = false; // in dfa
        boolean predicateEvaluationResult = evalSemanticContext(pair.pred, outerContext, pair.alt, fullCtx);
        if (debug || dfa_debug) {
            System.out.println("eval pred " + pair + "=" + predicateEvaluationResult);
        }
        if (predicateEvaluationResult) {
            if (debug || dfa_debug)
                System.out.println("PREDICT " + pair.alt);
            predictions.set(pair.alt);
            if (!complete) {
                break;
            }
        }
    }
    return predictions;
}
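A simplified sketch of how a caller consumes the returned alt set when a DFA state carries predicates, paraphrasing execATN in this class (complete is true here, so every pair is evaluated and ambiguities can be reported):

BitSet alts = evalSemanticContext(D.predicates, outerContext, true);
switch (alts.cardinality()) {
    case 0:
        // no predicate won: no viable alternative
        throw noViableAlt(input, outerContext, D.configs, startIndex);
    case 1:
        return alts.nextSetBit(0); // a single unambiguous winner
    default:
        // multiple alts remain viable; report the ambiguity, then take the minimum alt
        reportAmbiguity(dfa, D, startIndex, stopIndex, false, alts, D.configs);
        return alts.nextSetBit(0);
}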