Use of edu.uci.ics.crawler4j.robotstxt.HostDirectives in the crawler4j project by yasserg: the testParseWithNonLowercaseUserAgent method of the RobotstxtParserNonLowercaseUserAgentTest class checks that robots.txt directives are still matched when the configured user agent name contains uppercase characters.
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import org.junit.Test;
import edu.uci.ics.crawler4j.robotstxt.HostDirectives;
import edu.uci.ics.crawler4j.robotstxt.RobotstxtConfig;
import edu.uci.ics.crawler4j.robotstxt.RobotstxtParser;

@Test
public void testParseWithNonLowercaseUserAgent() {
    String userAgent = "testAgent";
    // robots.txt content with a mixed-case agent token and a single Disallow rule
    String content = "User-agent: " + userAgent + '\n' + "Disallow: /test/path/\n";
    final RobotstxtConfig robotsConfig = new RobotstxtConfig();
    robotsConfig.setUserAgentName(userAgent);
    HostDirectives hostDirectives = RobotstxtParser.parse(content, robotsConfig);
    assertNotNull("parsed HostDirectives is null", hostDirectives);
    assertFalse("HostDirectives should not allow path: '/test/path/'",
        hostDirectives.allows("/test/path/"));
}
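
For context, outside of unit tests a RobotstxtConfig is normally not passed to RobotstxtParser directly but wired into a crawl through RobotstxtServer, which fetches and caches each host's robots.txt. Below is a minimal sketch following the standard crawler4j setup pattern; the class name, storage folder, and seed URL are placeholders, not part of the original test.

import edu.uci.ics.crawler4j.crawler.CrawlConfig;
import edu.uci.ics.crawler4j.crawler.CrawlController;
import edu.uci.ics.crawler4j.fetcher.PageFetcher;
import edu.uci.ics.crawler4j.robotstxt.RobotstxtConfig;
import edu.uci.ics.crawler4j.robotstxt.RobotstxtServer;

public class RobotstxtWiringSketch {
    public static void main(String[] args) throws Exception {
        CrawlConfig config = new CrawlConfig();
        config.setCrawlStorageFolder("/tmp/crawl"); // placeholder storage folder

        // Same RobotstxtConfig type as in the test above; the agent name set
        // here is what the parser matches against User-agent lines.
        RobotstxtConfig robotstxtConfig = new RobotstxtConfig();
        robotstxtConfig.setUserAgentName("testAgent");

        PageFetcher pageFetcher = new PageFetcher(config);
        RobotstxtServer robotstxtServer = new RobotstxtServer(robotstxtConfig, pageFetcher);

        // The controller consults robotstxtServer before fetching each URL.
        CrawlController controller = new CrawlController(config, pageFetcher, robotstxtServer);
        controller.addSeed("https://example.com/"); // placeholder seed URL
    }
}

Setting the user agent name on RobotstxtConfig is what connects this setup to the test: the directives parsed for each host are filtered against that name, so a mixed-case name like "testAgent" relies on the matching behavior the test verifies.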