Use of org.antlr.v4.test.runtime.java.api.JavaLexer in project antlr4 by tunnelvisionlabs.
In class TimeLexerSpeed, method lex_legacy_java_utf8:
public void lex_legacy_java_utf8(int n, boolean clearLexerDFACache) throws Exception {
    InputStream is = TimeLexerSpeed.class.getClassLoader().getResourceAsStream(Parser_java_file);
    try {
        InputStreamReader isr = new InputStreamReader(is, Charset.forName("UTF-8"));
        try {
            BufferedReader br = new BufferedReader(isr);
            try {
                @SuppressWarnings("deprecation")
                CharStream input = new org.antlr.v4.runtime.ANTLRInputStream(br);
                JavaLexer lexer = new JavaLexer(input);
                double avg = tokenize(lexer, n, clearLexerDFACache);
                String currentMethodName = new Exception().getStackTrace()[0].getMethodName();
                if (output)
                    System.out.printf("%27s average time %5dus over %4d runs of %5d symbols%s\n",
                            currentMethodName, (int) avg, n, input.size(), clearLexerDFACache ? " DFA cleared" : "");
            } finally {
                br.close();
            }
        } finally {
            isr.close();
        }
    } finally {
        is.close();
    }
}
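For comparison, the same benchmark can be written with try-with-resources and the non-deprecated CharStreams factory (available in newer runtime versions). This is a minimal sketch, not part of TimeLexerSpeed: the method name is made up, while Parser_java_file, output, and tokenize(...) are assumed from the listing above.

// Sketch only: a try-with-resources variant of the benchmark above, assuming the
// same TimeLexerSpeed fields (Parser_java_file, output) and tokenize(...) helper,
// plus an ANTLR runtime that provides CharStreams.fromReader (4.7+).
public void lex_java_utf8_with_charstreams(int n, boolean clearLexerDFACache) throws Exception {
    try (InputStream is = TimeLexerSpeed.class.getClassLoader().getResourceAsStream(Parser_java_file);
         Reader r = new InputStreamReader(is, StandardCharsets.UTF_8);
         BufferedReader br = new BufferedReader(r)) {
        // CharStreams.fromReader replaces the deprecated ANTLRInputStream constructor.
        CharStream input = CharStreams.fromReader(br);
        JavaLexer lexer = new JavaLexer(input);
        double avg = tokenize(lexer, n, clearLexerDFACache);
        if (output) {
            System.out.printf("%27s average time %5dus over %4d runs of %5d symbols%s%n",
                    "lex_java_utf8_with_charstreams", (int) avg, n, input.size(),
                    clearLexerDFACache ? " DFA cleared" : "");
        }
    }
}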
Use of org.antlr.v4.test.runtime.java.api.JavaLexer in project flink-mirror by flink-ci.
In class JavaParserTest, method testConstructorCall:
@Test
public void testConstructorCall() {
    String code =
        "public class A extends B {\n"
            + " private final int a, b;\n"
            + " public A(int a) {\n"
            + " this(a, 0);\n"
            + " }\n"
            + " public A(int a, int b) {\n"
            + " super(a, b);\n"
            + " this.a = a;\n"
            + " this.b = b;\n"
            + " }\n"
            + "}";
    CommonTokenStream tokenStream = new CommonTokenStream(new JavaLexer(CharStreams.fromString(code)));
    JavaParser parser = new JavaParser(tokenStream);
    TestConstructorCallVisitor visitor = new TestConstructorCallVisitor();
    visitor.visit(parser.compilationUnit());
    Assert.assertEquals(1, visitor.thisCount);
    Assert.assertEquals(1, visitor.superCount);
}
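TestConstructorCallVisitor itself is not shown here. A simple way to reproduce the same counts without a parse-tree visitor is to scan the default-channel token stream for this(...) and super(...) invocations; the helper below is a hypothetical sketch, not the Flink test's implementation.

// Hypothetical helper: counts explicit this(...) / super(...) constructor calls by
// scanning default-channel tokens. Token texts rather than generated type constants
// are checked, so the sketch does not depend on the lexer's token names.
static int[] countExplicitConstructorCalls(String source) {
    CommonTokenStream tokens = new CommonTokenStream(new JavaLexer(CharStreams.fromString(source)));
    tokens.fill();
    List<Token> visible = new ArrayList<>();
    for (Token t : tokens.getTokens()) {
        if (t.getChannel() == Token.DEFAULT_CHANNEL && t.getType() != Token.EOF) {
            visible.add(t);
        }
    }
    int thisCount = 0;
    int superCount = 0;
    for (int i = 0; i + 1 < visible.size(); i++) {
        boolean callStart = "(".equals(visible.get(i + 1).getText());
        String text = visible.get(i).getText();
        if (callStart && "this".equals(text)) {
            thisCount++;
        } else if (callStart && "super".equals(text)) {
            superCount++;
        }
    }
    return new int[] { thisCount, superCount };
}

For the snippet in the test above this yields one this(...) and one super(...) call, matching the visitor's assertions.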
Use of org.antlr.v4.test.runtime.java.api.JavaLexer in project flink by splunk.
In class MemberFieldRewriter, method prepareRewrite:
private JavaParser prepareRewrite() {
    CommonTokenStream tokenStream = new CommonTokenStream(new JavaLexer(CharStreams.fromString(code)));
    this.rewriter = new TokenStreamRewriter(tokenStream);
    JavaParser javaParser = new JavaParser(tokenStream);
    javaParser.getInterpreter().setPredictionMode(PredictionMode.SLL);
    return javaParser;
}
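The returned parser and the stored TokenStreamRewriter are typically used together: parse, record edits against token positions, then render the rewritten source. The sketch below shows that general pattern only; the inserted comment and method name are illustrative, not MemberFieldRewriter's actual rewrite logic.

// Sketch of how a prepared parser plus TokenStreamRewriter are commonly combined.
// The edit shown (inserting a header comment) is only an illustration.
String rewriteExample(String code) {
    CommonTokenStream tokenStream = new CommonTokenStream(new JavaLexer(CharStreams.fromString(code)));
    TokenStreamRewriter rewriter = new TokenStreamRewriter(tokenStream);
    JavaParser parser = new JavaParser(tokenStream);
    parser.getInterpreter().setPredictionMode(PredictionMode.SLL);

    // A visitor or listener would normally walk 'tree' and record edits via 'rewriter'.
    ParseTree tree = parser.compilationUnit();

    // Edits are recorded against original token indexes; the underlying stream is
    // never mutated, so multiple edits compose safely.
    Token first = tokenStream.get(0);
    rewriter.insertBefore(first, "/* generated */\n");

    // getText() replays the original tokens with the recorded edits applied.
    return rewriter.getText();
}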
Use of org.antlr.v4.test.runtime.java.api.JavaLexer in project antlr4 by antlr.
In class TestPerformance, method compileJdk:
@Test
@org.junit.Ignore
public void compileJdk() throws IOException, InterruptedException, ExecutionException {
    String jdkSourceRoot = getSourceRoot("JDK");
    assertTrue("The JDK_SOURCE_ROOT environment variable must be set for performance testing.",
            jdkSourceRoot != null && !jdkSourceRoot.isEmpty());
    compileJavaParser(USE_LR_GRAMMAR);
    final String lexerName = USE_LR_GRAMMAR ? "JavaLRLexer" : "JavaLexer";
    final String parserName = USE_LR_GRAMMAR ? "JavaLRParser" : "JavaParser";
    final String listenerName = USE_LR_GRAMMAR ? "JavaLRBaseListener" : "JavaBaseListener";
    final String entryPoint = "compilationUnit";
    final ParserFactory factory = getParserFactory(lexerName, parserName, listenerName, entryPoint);
    if (!TOP_PACKAGE.isEmpty()) {
        jdkSourceRoot = jdkSourceRoot + '/' + TOP_PACKAGE.replace('.', '/');
    }
    File directory = new File(jdkSourceRoot);
    assertTrue(directory.isDirectory());
    FilenameFilter filesFilter = FilenameFilters.extension(".java", false);
    FilenameFilter directoriesFilter = FilenameFilters.ALL_FILES;
    final List<InputDescriptor> sources = loadSources(directory, filesFilter, directoriesFilter, RECURSIVE);
    for (int i = 0; i < PASSES; i++) {
        if (COMPUTE_TRANSITION_STATS) {
            totalTransitionsPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)];
            computedTransitionsPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)];
            if (DETAILED_DFA_STATE_STATS) {
                decisionInvocationsPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
                fullContextFallbackPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
                nonSllPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
                totalTransitionsPerDecisionPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
                computedTransitionsPerDecisionPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
                fullContextTransitionsPerDecisionPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
            }
        }
        if (COMPUTE_TIMING_STATS) {
            timePerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)];
            tokensPerFile[i] = new int[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)];
        }
    }
    System.out.format("Located %d source files.%n", sources.size());
    System.out.print(getOptionsDescription(TOP_PACKAGE));
    ExecutorService executorService = Executors.newFixedThreadPool(FILE_GRANULARITY ? 1 : NUMBER_OF_THREADS, new NumberedThreadFactory());
    List<Future<?>> passResults = new ArrayList<Future<?>>();
    passResults.add(executorService.submit(new Runnable() {
        @Override
        public void run() {
            try {
                parse1(0, factory, sources, SHUFFLE_FILES_AT_START);
            } catch (InterruptedException ex) {
                Logger.getLogger(TestPerformance.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
    }));
    for (int i = 0; i < PASSES - 1; i++) {
        final int currentPass = i + 1;
        passResults.add(executorService.submit(new Runnable() {
            @Override
            public void run() {
                if (CLEAR_DFA) {
                    int index = FILE_GRANULARITY ? 0 : ((NumberedThread) Thread.currentThread()).getThreadNumber();
                    if (sharedLexers.length > 0 && sharedLexers[index] != null) {
                        ATN atn = sharedLexers[index].getATN();
                        for (int j = 0; j < sharedLexers[index].getInterpreter().decisionToDFA.length; j++) {
                            sharedLexers[index].getInterpreter().decisionToDFA[j] = new DFA(atn.getDecisionState(j), j);
                        }
                    }
                    if (sharedParsers.length > 0 && sharedParsers[index] != null) {
                        ATN atn = sharedParsers[index].getATN();
                        for (int j = 0; j < sharedParsers[index].getInterpreter().decisionToDFA.length; j++) {
                            sharedParsers[index].getInterpreter().decisionToDFA[j] = new DFA(atn.getDecisionState(j), j);
                        }
                    }
                    if (FILE_GRANULARITY) {
                        Arrays.fill(sharedLexers, null);
                        Arrays.fill(sharedParsers, null);
                    }
                }
                try {
                    parse2(currentPass, factory, sources, SHUFFLE_FILES_AFTER_ITERATIONS);
                } catch (InterruptedException ex) {
                    Logger.getLogger(TestPerformance.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }));
    }
    for (Future<?> passResult : passResults) {
        passResult.get();
    }
    executorService.shutdown();
    executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    if (COMPUTE_TRANSITION_STATS && SHOW_TRANSITION_STATS_PER_FILE) {
        computeTransitionStatistics();
    }
    if (COMPUTE_TIMING_STATS) {
        computeTimingStatistics();
    }
    sources.clear();
    if (PAUSE_FOR_HEAP_DUMP) {
        System.gc();
        System.out.println("Pausing before application exit.");
        try {
            Thread.sleep(4000);
        } catch (InterruptedException ex) {
            Logger.getLogger(TestPerformance.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}
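The pass scheduling above boils down to a standard executor pattern: submit each pass as a Runnable, keep the returned Futures, and join on them so exceptions surface before the pool is shut down. A stripped-down sketch of that skeleton, where parsePass(...) is a made-up stand-in for parse1/parse2:

// Minimal sketch of the scheduling skeleton used by compileJdk: each pass becomes a
// task, Future.get() joins the passes and rethrows any failure, then the pool is
// drained. parsePass(...) is a hypothetical placeholder, not part of TestPerformance.
static void runPasses(int passes, int threads) throws InterruptedException, ExecutionException {
    ExecutorService executor = Executors.newFixedThreadPool(threads);
    List<Future<?>> results = new ArrayList<>();
    for (int pass = 0; pass < passes; pass++) {
        final int currentPass = pass;
        results.add(executor.submit(() -> parsePass(currentPass)));
    }
    for (Future<?> result : results) {
        result.get();  // propagates ExecutionException if a pass failed
    }
    executor.shutdown();
    executor.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
}

// Hypothetical per-pass work (parse1/parse2 in the real test).
static void parsePass(int pass) {
    System.out.println("pass " + pass);
}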
Use of org.antlr.v4.test.runtime.java.api.JavaLexer in project antlr4 by tunnelvisionlabs.
In class TestPerformance, method compileJdk:
@Test
// @org.junit.Ignore
public void compileJdk() throws IOException, InterruptedException, ExecutionException {
    String jdkSourceRoot = getSourceRoot("JDK");
    assertTrue("The JDK_SOURCE_ROOT environment variable must be set for performance testing.",
            jdkSourceRoot != null && !jdkSourceRoot.isEmpty());
    compileJavaParser(USE_LR_GRAMMAR);
    final String lexerName = USE_LR_GRAMMAR ? "JavaLRLexer" : "JavaLexer";
    final String parserName = USE_LR_GRAMMAR ? "JavaLRParser" : "JavaParser";
    final String listenerName = USE_LR_GRAMMAR ? "JavaLRBaseListener" : "JavaBaseListener";
    final String entryPoint = "compilationUnit";
    final ParserFactory factory = getParserFactory(lexerName, parserName, listenerName, entryPoint);
    if (!TOP_PACKAGE.isEmpty()) {
        jdkSourceRoot = jdkSourceRoot + '/' + TOP_PACKAGE.replace('.', '/');
    }
    File directory = new File(jdkSourceRoot);
    assertTrue(directory.isDirectory());
    FilenameFilter filesFilter = FilenameFilters.extension(".java", false);
    FilenameFilter directoriesFilter = FilenameFilters.ALL_FILES;
    final List<InputDescriptor> sources = loadSources(directory, filesFilter, directoriesFilter, RECURSIVE);
    for (int i = 0; i < PASSES; i++) {
        if (COMPUTE_TRANSITION_STATS) {
            totalTransitionsPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)];
            computedTransitionsPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)];
            if (DETAILED_DFA_STATE_STATS) {
                decisionInvocationsPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
                fullContextFallbackPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
                nonSllPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
                totalTransitionsPerDecisionPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
                computedTransitionsPerDecisionPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
                fullContextTransitionsPerDecisionPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
            }
        }
        if (COMPUTE_TIMING_STATS) {
            timePerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)];
            tokensPerFile[i] = new int[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)];
        }
    }
    System.out.format("Located %d source files.%n", sources.size());
    System.out.print(getOptionsDescription(TOP_PACKAGE));
    ExecutorService executorService = Executors.newFixedThreadPool(FILE_GRANULARITY ? 1 : NUMBER_OF_THREADS, new NumberedThreadFactory());
    List<Future<?>> passResults = new ArrayList<Future<?>>();
    passResults.add(executorService.submit(new Runnable() {
        @Override
        public void run() {
            try {
                parse1(0, factory, sources, SHUFFLE_FILES_AT_START);
            } catch (InterruptedException ex) {
                Logger.getLogger(TestPerformance.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
    }));
    for (int i = 0; i < PASSES - 1; i++) {
        final int currentPass = i + 1;
        passResults.add(executorService.submit(new Runnable() {
            @Override
            public void run() {
                if (CLEAR_DFA) {
                    int index = FILE_GRANULARITY ? 0 : ((NumberedThread) Thread.currentThread()).getThreadNumber();
                    if (sharedLexers.length > 0 && sharedLexers[index] != null) {
                        ATN atn = sharedLexers[index].getATN();
                        atn.clearDFA();
                    }
                    if (sharedParsers.length > 0 && sharedParsers[index] != null) {
                        ATN atn = sharedParsers[index].getATN();
                        atn.clearDFA();
                    }
                    if (FILE_GRANULARITY) {
                        Arrays.fill(sharedLexers, null);
                        Arrays.fill(sharedParsers, null);
                    }
                }
                try {
                    parse2(currentPass, factory, sources, SHUFFLE_FILES_AFTER_ITERATIONS);
                } catch (InterruptedException ex) {
                    Logger.getLogger(TestPerformance.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }));
    }
    for (Future<?> passResult : passResults) {
        passResult.get();
    }
    executorService.shutdown();
    executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    if (COMPUTE_TRANSITION_STATS && SHOW_TRANSITION_STATS_PER_FILE) {
        computeTransitionStatistics();
    }
    if (COMPUTE_TIMING_STATS) {
        computeTimingStatistics();
    }
    sources.clear();
    if (PAUSE_FOR_HEAP_DUMP) {
        System.gc();
        System.out.println("Pausing before application exit.");
        try {
            Thread.sleep(4000);
        } catch (InterruptedException ex) {
            Logger.getLogger(TestPerformance.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}
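The only functional difference from the antlr/antlr4 listing above is how the DFA cache is dropped between passes: the reference runtime's test rebuilds each decisionToDFA entry by hand, while the tunnelvisionlabs fork's ATN exposes clearDFA(). In recent versions of the reference runtime a similar effect can be obtained through the recognizer's interpreter; the helper below is a minimal sketch under that assumption, not code from either test.

// Sketch only: resets a lexer's and parser's DFA caches so the next pass starts cold.
// Assumes a reference-runtime version whose ATNSimulator implements clearDFA(); on
// versions without it, fall back to the per-decision loop in the first compileJdk listing.
static void clearDfaCaches(Lexer lexer, Parser parser) {
    if (lexer != null) {
        lexer.getInterpreter().clearDFA();
    }
    if (parser != null) {
        parser.getInterpreter().clearDFA();
    }
}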