Use of org.codelibs.fess.crawler.entity.UrlQueueImpl in project fess-crawler by codelibs: the test_delayBeforeProcessing method of the HostIntervalControllerTest class.
// Imports required by this snippet; the org.codelibs.fess.crawler.* package
// paths follow the usual fess-crawler layout and are assumed here.
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

import org.codelibs.fess.crawler.entity.UrlQueue;
import org.codelibs.fess.crawler.entity.UrlQueueImpl;
import org.codelibs.fess.crawler.interval.impl.HostIntervalController;
import org.codelibs.fess.crawler.util.CrawlingParameterUtil;

/**
 * Verifies that the crawling interval for a single host is honored.
 */
public void test_delayBeforeProcessing() {
    // number of concurrent tasks
    final int numTasks = 100;
    // interval in milliseconds
    final Long waittime = 100L;
    CrawlingParameterUtil.setUrlQueue(new UrlQueueImpl());
    final UrlQueue q = CrawlingParameterUtil.getUrlQueue();
    for (int i = 0; i < numTasks; i++) {
        q.setUrl("http://example.com");
    }
    final HostIntervalController controller = new HostIntervalController();
    controller.delayMillisBeforeProcessing = waittime;
    controller.delayMillisAfterProcessing = 0L;
    controller.delayMillisForWaitingNewUrl = 0L;
    controller.delayMillisAtNoUrlInQueue = 0L;
    final Callable<Integer> testCallable = new Callable<Integer>() {
        public Integer call() throws Exception {
            CrawlingParameterUtil.setUrlQueue(q);
            controller.delayBeforeProcessing();
            return 0;
        }
    };
    // create multiple Callable tasks
    final List<Callable<Integer>> tasks = new ArrayList<Callable<Integer>>();
    for (int i = 0; i < numTasks; i++) {
        tasks.add(testCallable);
    }
    // record the start time
    final long time = System.nanoTime();
    // run all Callable tasks concurrently
    final ExecutorService executor = Executors.newFixedThreadPool(numTasks);
    try {
        final List<Future<Integer>> futures = executor.invokeAll(tasks);
        for (final Future<Integer> future : futures) {
            future.get();
        }
    } catch (final InterruptedException e) {
        // nothing to do
    } catch (final ExecutionException e) {
        // nothing to do
    }
    // numTasks requests against the same host imply at least (numTasks - 1)
    // full intervals between them
    final long elapsed = (System.nanoTime() - time) / 1000000;
    final long wait = waittime * (numTasks - 1);
    assertTrue(elapsed + " >= " + wait, elapsed >= wait);
}
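For context, here is a minimal sequential sketch of the same per-host throttling, using the same classes as the test above (HostIntervalController, CrawlingParameterUtil, UrlQueueImpl). It assumes, as the (numTasks - 1) factor in the assertion suggests, that the first call for a host returns without waiting:

// Minimal sketch, not part of the original test: two back-to-back calls
// for the same host should be separated by delayMillisBeforeProcessing.
final HostIntervalController controller = new HostIntervalController();
controller.delayMillisBeforeProcessing = 100L;
controller.delayMillisAfterProcessing = 0L;

final UrlQueue q = new UrlQueueImpl();
q.setUrl("http://example.com");
CrawlingParameterUtil.setUrlQueue(q);

final long start = System.nanoTime();
controller.delayBeforeProcessing(); // first call: no prior access recorded
controller.delayBeforeProcessing(); // second call: waits ~100 ms
final long elapsedMillis = (System.nanoTime() - start) / 1000000;
// elapsedMillis is expected to be >= 100: one interval for the second call

The concurrent version in the test generalizes this: with 100 tasks hitting the same host, the controller serializes them into at least 99 intervals of 100 ms each, which is exactly what the final assertion checks.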