Use of nl.basjes.parse.useragent.config.TestCase in the project yauaa by nielsbasjes.
The class RunTests, method getRunTests.
// ===========================================
@Operation(description = "Fire all available test cases against the analyzer and return 200 if all tests were good")
@ApiResponse(// HttpStatus.OK
responseCode = "200", description = "All tests were good", content = @Content(mediaType = TEXT_PLAIN_VALUE, examples = @ExampleObject("All 3866 tests passed in 2994ms (average 0.775ms per testcase).")))
@ApiResponse(// HttpStatus.INTERNAL_SERVER_ERROR
responseCode = "500", description = "A test failed", content = @Content(mediaType = TEXT_PLAIN_VALUE, examples = @ExampleObject("Extensive text describing what went wrong in the test that failed")))
@GetMapping(value = "/runtests", produces = TEXT_PLAIN_VALUE)
public String getRunTests() {
    // Run every embedded test case against the shared analyzer instance and
    // report either a one-line success summary or throw so the caller gets a 500.
    UserAgentAnalyzer analyzer = ParseService.getUserAgentAnalyzer();
    List<TestCase> allTests = analyzer.getTestCases();
    long startNanos = System.nanoTime();
    // Count the test cases whose verification fails.
    long failedCount = allTests.stream().filter(testCase -> !testCase.verify(analyzer)).count();
    long stopNanos = System.nanoTime();
    long elapsedNanos = stopNanos - startNanos;
    if (failedCount == 0) {
        return String.format("All %d tests passed in %dms (average %4.3fms per testcase).", allTests.size(), elapsedNanos / 1_000_000, (elapsedNanos / 1_000_000D / allTests.size()));
    }
    throw new YauaaTestsFailed("There were " + failedCount + " failed tests " + "(~" + ((100.0D * failedCount) / allTests.size()) + "%)");
}
Use of nl.basjes.parse.useragent.config.TestCase in the project yauaa by nielsbasjes.
The class TestConcurrentPerformance, method testCachedMultiThreadedPerformance.
@Test
void testCachedMultiThreadedPerformance() throws InterruptedException {
    // NOSONAR: Do not complain about ignored performance test
    // Runs the full test suite concurrently with 10 threads that hammer a single
    // cached useragent, then reports per-iteration and overall average timings.
    UserAgentAnalyzer uaa = UserAgentAnalyzer.newBuilder().immediateInitialization().keepTests().build();
    // This testcase does not occur in the rest of the testcases (manually manipulated version for the Chrome part).
    String cachedUserAgent = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/95.1.2.3.4.5.6 Safari/537.36";
    List<TestCase> testCases = uaa.getTestCases();
    // Make sure it is not in there.
    assertEquals(0, testCases.stream().filter(testCase -> testCase.getUserAgent().equals(cachedUserAgent)).count());
    long totalIterations = 0;
    long totalNanosUsed = 0;
    for (int i = 0; i < 10; i++) {
        LOG.info("Iteration {} : Start", i);
        FireAllTestCases fireTests = new FireAllTestCases(uaa, testCases);
        List<RunCachedTestCase> cachedTestCases = Arrays.asList(new RunCachedTestCase(uaa, cachedUserAgent, 10_000_000), new RunCachedTestCase(uaa, cachedUserAgent, 10_000_000), new RunCachedTestCase(uaa, cachedUserAgent, 10_000_000), new RunCachedTestCase(uaa, cachedUserAgent, 10_000_000), new RunCachedTestCase(uaa, cachedUserAgent, 10_000_000), new RunCachedTestCase(uaa, cachedUserAgent, 10_000_000), new RunCachedTestCase(uaa, cachedUserAgent, 10_000_000), new RunCachedTestCase(uaa, cachedUserAgent, 10_000_000), new RunCachedTestCase(uaa, cachedUserAgent, 10_000_000), new RunCachedTestCase(uaa, cachedUserAgent, 10_000_000));
        // Wipe the cache for the new run.
        uaa.clearCache();
        // Now parse and cache the precached useragent.
        uaa.parse(cachedUserAgent);
        // Start both
        fireTests.start();
        cachedTestCases.forEach(Thread::start);
        // Wait for both to finish
        fireTests.join();
        for (RunCachedTestCase ctc : cachedTestCases) {
            ctc.join();
        }
        for (RunCachedTestCase cachedTestCase : cachedTestCases) {
            long iterations = cachedTestCase.getIterations();
            long nanosUsed = cachedTestCase.getNanosUsed();
            // BUGFIX: accumulate the per-thread results. Previously these totals
            // were never updated, so the final "Average" line below divided by
            // zero (ArithmeticException on totalNanosUsed / totalIterations).
            totalIterations += iterations;
            totalNanosUsed += nanosUsed;
            LOG.info("Iteration {} : Took {}ns ({}ms) = {}ns each", i, nanosUsed, (nanosUsed) / 1_000_000L, nanosUsed / iterations);
        }
    }
    LOG.info("Average : {}ns ({}ms) = {}ns each", totalNanosUsed, (totalNanosUsed) / 1_000_000L, totalNanosUsed / totalIterations);
}
Use of nl.basjes.parse.useragent.config.TestCase in the project yauaa by nielsbasjes.
The class TestTestCase, method badInput.
@Test
void badInput() {
    // A test case that expects a deliberately wrong DeviceClass value
    // must fail verification.
    TestCase wrongExpectation = new TestCase(USERAGENT, "My test");
    wrongExpectation.expect("DeviceClass", "This will be wrong");
    boolean verified = wrongExpectation.verify(userAgentAnalyzer, true);
    assertFalse(verified);
}
Use of nl.basjes.parse.useragent.config.TestCase in the project yauaa by nielsbasjes.
The class AbstractUserAgentAnalyzerTester, method runTests.
/**
 * Runs every test case embedded in the analyzer and logs a detailed tabular report.
 *
 * NOTE(review): the '%s'/'%d' placeholders used with LOG throughout this method
 * suggest a printf-style (formatter) logger; confirm LOG is not a plain SLF4J
 * logger (which would need '{}' placeholders).
 *
 * @param analyzer the analyzer whose embedded test cases are executed
 * @param showAll when true the details of passing tests are also printed and execution continues past failures
 * @param failOnUnexpected when true an unexpected non-default value (other than the special SYNTAX_ERROR field) fails the test
 * @param onlyValidateFieldNames if non-null and non-empty, only these fields are validated; an empty collection means "validate everything"
 * @param measureSpeed when true each test is parsed 1000 times (after a 100-parse preheat) to measure parses/second
 * @param showPassedTests when true a report line is logged for every passing test
 * @param errorMessageReceiver buffer that additionally receives the report output (presumably may be null; verify logInfo/logWarn/logError handle that)
 * @return true when all tests passed, false otherwise
 */
public static boolean runTests(AbstractUserAgentAnalyzerDirect analyzer, boolean showAll, boolean failOnUnexpected, Collection<String> onlyValidateFieldNames, boolean measureSpeed, boolean showPassedTests, StringBuilder errorMessageReceiver) {
analyzer.initializeMatchers();
// No embedded test cases at all: treat as trivially successful.
if (analyzer.getTestCases() == null) {
return true;
}
// Reusable agent carrying debug bookkeeping; reused across all test cases.
DebugUserAgent agent = new DebugUserAgent(analyzer.getWantedFieldNames());
List<TestResult> results = new ArrayList<>(32);
String filenameHeader = "Test number and source";
int filenameHeaderLength = filenameHeader.length();
int maxFilenameLength = filenameHeaderLength;
// First pass: find the widest "filename" so the report columns line up.
for (TestCase test : analyzer.getTestCases()) {
Map<String, String> metaData = test.getMetadata();
String filename = metaData.get("filename");
// NOTE(review): assumes every test carries "filename" metadata; a missing entry would NPE here.
maxFilenameLength = Math.max(maxFilenameLength, filename.length());
}
// Extra room for the "12345.(" prefix and ":linenumber)" suffix added per row.
maxFilenameLength += 11;
// Shared scratch buffer, reset (setLength(0)) before each reuse below.
StringBuilder sb = new StringBuilder(1024);
sb.append("| ").append(filenameHeader);
for (int i = filenameHeaderLength; i < maxFilenameLength; i++) {
sb.append(' ');
}
sb.append(" |S|AA|MF|");
if (measureSpeed) {
sb.append(" PPS| msPP|");
}
sb.append("--> S=Syntax Error, AA=Number of ambiguities during parse, MF=Matches Found");
if (measureSpeed) {
sb.append(", PPS=parses/sec, msPP=milliseconds per parse");
}
long fullStart = System.nanoTime();
if (showPassedTests) {
LOG.info("+===========================================================================================");
LOG.info("%s", sb);
LOG.info("+-------------------------------------------------------------------------------------------");
}
boolean allPass = true;
int testcount = 0;
// Second pass: actually run every test case.
for (TestCase test : analyzer.getTestCases()) {
testcount++;
String testName = test.getTestName();
String userAgentString = test.getUserAgent();
Map<String, String> expected = test.getExpected();
List<String> options = test.getOptions();
Map<String, String> metaData = test.getMetadata();
String filename = metaData.get("filename");
String linenumber = metaData.get("fileline");
// "init" mode: instead of validating, dump a YAML skeleton for this testcase.
boolean init = false;
if (options == null) {
analyzer.setVerbose(false);
agent.setDebug(false);
} else {
boolean newVerbose = options.contains("verbose");
analyzer.setVerbose(newVerbose);
agent.setDebug(newVerbose);
init = options.contains("init");
}
// A test without expectations is implicitly an init-mode test.
if (expected == null || expected.size() == 0) {
init = true;
expected = null;
}
// Fall back to (a possibly truncated) useragent string as the test name.
if (testName == null) {
if (userAgentString.length() > 200) {
testName = userAgentString.substring(0, 190) + " ... ( " + userAgentString.length() + " chars)";
} else {
testName = userAgentString;
}
}
sb.setLength(0);
sb.append("|").append(String.format("%5d", testcount)).append(".(").append(filename).append(':').append(linenumber).append(')');
// Pad the location column to the precomputed width (7 = fixed punctuation chars).
for (int i = filename.length() + linenumber.length() + 7; i < maxFilenameLength; i++) {
sb.append(' ');
}
agent.setUserAgentString(userAgentString);
UserAgent parseResult = null;
long measuredSpeed = -1;
if (measureSpeed) {
// Preheat
for (int i = 0; i < 100; i++) {
analyzer.parse(agent);
}
long startTime = System.nanoTime();
for (int i = 0; i < 1000; i++) {
parseResult = analyzer.parse(agent);
}
long stopTime = System.nanoTime();
// parses per second = iterations / elapsed seconds, computed in nanos.
measuredSpeed = (1000000000L * (1000)) / (stopTime - startTime);
} else {
parseResult = analyzer.parse(agent);
}
// Status columns: S = syntax error, AA = ambiguity count, MF = matches found.
sb.append('|');
if (parseResult.hasSyntaxError()) {
sb.append('S');
} else {
sb.append(' ');
}
if (parseResult.hasAmbiguity()) {
sb.append(String.format("|%2d", parseResult.getAmbiguityCount()));
} else {
sb.append("| ");
}
sb.append(String.format("|%2d", agent.getNumberOfAppliedMatches()));
if (measureSpeed) {
sb.append('|').append(String.format("%5d", measuredSpeed));
sb.append('|').append(String.format("%5.2f", 1000.0 / measuredSpeed));
}
sb.append("| ").append(testName);
// We create the log line but we keep it until we know it actually must be output to the screen
String testLogLine = sb.toString();
sb.setLength(0);
boolean pass = true;
results.clear();
// In init mode: emit the parse result as a YAML testcase skeleton.
if (init) {
LOG.info(testLogLine);
sb.append(agent.toYamlTestCase());
LOG.info("%s", sb);
}
// "Field".length()+1; NOSONAR: This is not commented code.
int maxNameLength = 6;
// "Actual".length()+1; NOSONAR: This is not commented code.
int maxActualLength = 7;
// "Expected".length()+1; NOSONAR: This is not commented code.
int maxExpectedLength = 9;
// Validate every relevant field against the expectations.
if (expected != null) {
List<String> fieldNames = new ArrayList<>(parseResult.getAvailableFieldNamesSorted());
// An empty restriction set means "no restriction at all".
if (onlyValidateFieldNames != null && onlyValidateFieldNames.isEmpty()) {
onlyValidateFieldNames = null;
} else if (onlyValidateFieldNames != null) {
fieldNames.clear();
fieldNames.addAll(onlyValidateFieldNames);
}
// Also check expected fields that the parse did not produce at all.
for (String newFieldName : expected.keySet()) {
if (!fieldNames.contains(newFieldName)) {
fieldNames.add(newFieldName);
}
}
for (String fieldName : fieldNames) {
// Only check the desired fieldnames
if (onlyValidateFieldNames != null && !onlyValidateFieldNames.contains(fieldName)) {
continue;
}
TestResult result = new TestResult();
result.field = fieldName;
boolean expectedSomething;
// Actual value
result.actual = parseResult.getValue(result.field);
result.isDefault = parseResult.get(result.field).isDefaultValue();
result.confidence = parseResult.getConfidence(result.field);
if (result.actual == null) {
result.actual = NULL_VALUE;
}
// Expected value
String expectedValue = expected.get(fieldName);
if (expectedValue == null) {
// No explicit expectation: a default value is fine, a real value is "unexpected".
expectedSomething = false;
if (result.isDefault) {
continue;
}
result.expected = "<<absent>>";
} else {
expectedSomething = true;
result.expected = expectedValue;
}
result.pass = result.actual.equals(result.expected);
if (!result.pass) {
result.warn = true;
if (expectedSomething) {
// An explicitly expected value that differs is always a hard failure.
result.warn = false;
pass = false;
allPass = false;
} else {
// An unexpected extra value only fails when failOnUnexpected is set.
if (failOnUnexpected) {
// We ignore this special field
if (!SYNTAX_ERROR.equals(result.field)) {
result.warn = false;
pass = false;
allPass = false;
}
}
}
}
results.add(result);
// Track column widths for the result table below.
maxNameLength = Math.max(maxNameLength, result.field.length());
maxActualLength = Math.max(maxActualLength, result.actual.length());
maxExpectedLength = Math.max(maxExpectedLength, result.expected.length());
}
// Internal matcher consistency check can also fail the test.
if (!agent.analyzeMatchersResult()) {
pass = false;
allPass = false;
}
}
// Passing non-init tests need no detailed report; move on.
if (!init && pass && !showAll) {
if (showPassedTests) {
logInfo(errorMessageReceiver, testLogLine);
}
continue;
}
if (!pass) {
logInfo(errorMessageReceiver, testLogLine);
logError(errorMessageReceiver, "| TEST FAILED !");
}
if (parseResult.hasAmbiguity()) {
logInfo(errorMessageReceiver, "| Parsing problem: Ambiguity {} times. ", parseResult.getAmbiguityCount());
}
if (parseResult.hasSyntaxError()) {
logInfo(errorMessageReceiver, "| Parsing problem: Syntax Error");
}
// For init mode or a failure: dump a commented-out matcher template with all
// possible require paths, to help write a new matcher for this useragent.
if (init || !pass) {
sb.setLength(0);
sb.append('\n');
sb.append('\n');
sb.append("- matcher:\n");
sb.append("#    options:\n");
sb.append("#    - 'verbose'\n");
sb.append("    require:\n");
for (String path : getAllPaths(userAgentString)) {
if (path.contains("=\"")) {
sb.append("#    - '").append(path).append("'\n");
}
}
sb.append("    extract:\n");
sb.append("#    - 'DeviceClass                         :      1 :' \n");
sb.append("#    - 'DeviceBrand                         :      1 :' \n");
sb.append("#    - 'DeviceName                          :      1 :' \n");
sb.append("#    - 'OperatingSystemClass                :      1 :' \n");
sb.append("#    - 'OperatingSystemName                 :      1 :' \n");
sb.append("#    - 'OperatingSystemVersion              :      1 :' \n");
sb.append("#    - 'LayoutEngineClass                   :      1 :' \n");
sb.append("#    - 'LayoutEngineName                    :      1 :' \n");
sb.append("#    - 'LayoutEngineVersion                 :      1 :' \n");
sb.append("#    - 'AgentClass                          :      1 :' \n");
sb.append("#    - 'AgentName                           :      1 :' \n");
sb.append("#    - 'AgentVersion                        :      1 :' \n");
sb.append('\n');
sb.append('\n');
LOG.info("%s", sb);
}
// Build the horizontal separator sized to the widest field/actual/expected values.
sb.setLength(0);
sb.append("+--------+-");
for (int i = 0; i < maxNameLength; i++) {
sb.append('-');
}
sb.append("-+-");
for (int i = 0; i < maxActualLength; i++) {
sb.append('-');
}
sb.append("-+---------+------------+-");
for (int i = 0; i < maxExpectedLength; i++) {
sb.append('-');
}
sb.append("-+");
String separator = sb.toString();
logInfo(errorMessageReceiver, separator);
// Table header row.
sb.setLength(0);
sb.append("| Result | Field ");
for (int i = 6; i < maxNameLength; i++) {
sb.append(' ');
}
sb.append(" | Actual ");
for (int i = 7; i < maxActualLength; i++) {
sb.append(' ');
}
sb.append(" | Default | Confidence | Expected ");
for (int i = 9; i < maxExpectedLength; i++) {
sb.append(' ');
}
sb.append(" |");
logInfo(errorMessageReceiver, sb.toString());
logInfo(errorMessageReceiver, separator);
// One row per validated field; failures/warnings are collected for the trace below.
Map<String, String> failComments = new HashMap<>();
List<String> failedFieldNames = new ArrayList<>();
for (TestResult result : results) {
sb.setLength(0);
if (result.pass) {
sb.append("|        | ");
} else {
if (result.warn) {
sb.append("| ~warn~ | ");
failComments.put(result.field, "~~ Unexpected result ~~");
} else {
sb.append("| -FAIL- | ");
failComments.put(result.field, "FAILED; Should be '" + result.expected + "'");
failedFieldNames.add(result.field);
}
}
sb.append(result.field);
for (int i = result.field.length(); i < maxNameLength; i++) {
sb.append(' ');
}
sb.append(" | ");
sb.append(result.actual);
for (int i = result.actual.length(); i < maxActualLength; i++) {
sb.append(' ');
}
if (result.isDefault) {
sb.append(" | Default | ");
} else {
sb.append(" |         | ");
}
sb.append(String.format("%10d", result.confidence));
sb.append(" | ");
if (result.pass) {
for (int i = 0; i < maxExpectedLength; i++) {
sb.append(' ');
}
sb.append(" |");
logInfo(errorMessageReceiver, sb.toString());
} else {
sb.append(result.expected);
for (int i = result.expected.length(); i < maxExpectedLength; i++) {
sb.append(' ');
}
sb.append(" |");
if (result.warn) {
logWarn(errorMessageReceiver, sb.toString());
} else {
logError(errorMessageReceiver, sb.toString());
}
}
}
logInfo(errorMessageReceiver, separator);
logInfo(errorMessageReceiver, "");
// Trace of which matchers touched the failed fields, plus a ready-to-paste YAML testcase.
logInfo(errorMessageReceiver, agent.toMatchTrace(failedFieldNames));
logInfo(errorMessageReceiver, "\n\nconfig:\n{}", parseResult.toYamlTestCase(!init, failComments));
logInfo(errorMessageReceiver, "Location of failed test.({}:{})", filename, linenumber);
// Unless showAll is set, stop at the first failure.
if (!pass && !showAll) {
return false;
}
// An init-mode test ends the run early after dumping its skeleton.
if (init) {
return allPass;
}
}
if (showPassedTests) {
LOG.info("+===========================================================================================");
} else {
LOG.info("All %d tests passed", testcount);
}
long fullStop = System.nanoTime();
LOG.info("This took %8.3f ms for %5d tests : averaging to %6.3f msec/test (This includes test validation and logging!!)", (fullStop - fullStart) / 1000000.0, testcount, ((double) (fullStop - fullStart)) / (testcount * 1000000L));
// Running zero tests is itself considered a failure.
if (testcount == 0) {
LOG.error("NO tests were run at all!!!");
allPass = false;
}
return allPass;
}
Use of nl.basjes.parse.useragent.config.TestCase in the project yauaa by nielsbasjes.
The class AbstractUserAgentAnalyzerTester, method analyzeMatcherImpactAllTests.
/**
 * Parses every embedded test case and reports, per matcher, how often it was
 * touched, how often it received all required inputs, and how often it
 * actually contributed matches.
 */
public void analyzeMatcherImpactAllTests() {
    // Without embedded test cases there is nothing to measure.
    if (getTestCases() == null) {
        return;
    }
    initializeMatchers();
    DebugUserAgent agent = new DebugUserAgent(getWantedFieldNames());
    setVerbose(false);
    agent.setDebug(false);
    // Keyed by source location for lookup; the list preserves the sorted order for reporting.
    Map<String, MatcherImpact> impactPerMatcher = new TreeMap<>();
    List<MatcherImpact> allImpacts = new ArrayList<>();
    getAllMatchers().stream().sorted(Comparator.comparing(Matcher::getSourceFileName).thenComparingLong(Matcher::getSourceFileLineNumber)).forEach(oneMatcher -> {
        MatcherImpact impact = new MatcherImpact();
        impact.neededInputs = oneMatcher.getActionsThatRequireInput();
        impact.name = oneMatcher.getMatcherSourceLocation();
        impactPerMatcher.put(oneMatcher.getMatcherSourceLocation(), impact);
        allImpacts.add(impact);
    });
    for (TestCase testCase : getTestCases()) {
        agent.setUserAgentString(testCase.getUserAgent());
        parse(agent);
        // Every matcher has now "seen" one more test.
        impactPerMatcher.values().forEach(impact -> impact.tests++);
        getTouchedMatchers().forEach(touchedMatcher -> {
            MatcherImpact impact = impactPerMatcher.get(touchedMatcher.getMatcherSourceLocation());
            impact.touched++;
            if (touchedMatcher.getActionsThatRequireInput() == touchedMatcher.getActionsThatRequireInputAndReceivedInput()) {
                impact.enoughInputs++;
                if (!touchedMatcher.getUsedMatches().isEmpty()) {
                    impact.used++;
                }
            }
        });
    }
    for (MatcherImpact impact : allImpacts) {
        LOG.info("%s", impact);
    }
}
Aggregations