Use of org.apache.hadoop.hive.llap.daemon.impl.TaskExecutorService.TaskWrapper in project hive by apache.
From the class TestTaskExecutorService, the method testPreemptionStateOnTaskFlagChanges:
@Test(timeout = 10000)
public void testPreemptionStateOnTaskFlagChanges() throws InterruptedException {
  MockRequest r1 = createMockRequest(1, 1, 100, 200, false, 20000L, false);
  MockRequest r2 = createMockRequest(2, 1, 100, 200, true, 2000000L, true);
  TaskExecutorServiceForTest taskExecutorService = new TaskExecutorServiceForTest(1, 2, ShortestJobFirstComparator.class.getName(), true);
  taskExecutorService.init(new Configuration());
  taskExecutorService.start();
  try {
    String fragmentId = r1.getRequestId();
    Scheduler.SubmissionState submissionState = taskExecutorService.schedule(r1);
    assertEquals(Scheduler.SubmissionState.ACCEPTED, submissionState);
    awaitStartAndSchedulerRun(r1, taskExecutorService);
    TaskWrapper taskWrapper = taskExecutorService.preemptionQueue.peek();
    assertNotNull(taskWrapper);
    assertTrue(taskWrapper.isInPreemptionQueue());
    // Now notify the executorService that the task has moved to finishable state.
    r1.setCanUpdateFinishable();
    taskWrapper.finishableStateUpdated(true);
    TaskWrapper taskWrapper2 = taskExecutorService.preemptionQueue.peek();
    assertNotNull(taskWrapper2);
    assertTrue(taskWrapper.isInPreemptionQueue());
    // And got a duck.
    boolean result = taskExecutorService.updateFragment(fragmentId, true);
    assertTrue(result);
    taskWrapper2 = taskExecutorService.preemptionQueue.peek();
    assertNull(taskWrapper2);
    assertFalse(taskWrapper.isInPreemptionQueue());
    r1.complete();
    r1.awaitEnd();
    // Now start with everything and test losing stuff.
    fragmentId = r2.getRequestId();
    submissionState = taskExecutorService.schedule(r2);
    assertEquals(Scheduler.SubmissionState.ACCEPTED, submissionState);
    awaitStartAndSchedulerRun(r2, taskExecutorService);
    taskWrapper = taskExecutorService.preemptionQueue.peek();
    assertNull(taskWrapper);
    // Lost the duck.
    result = taskExecutorService.updateFragment(fragmentId, false);
    assertTrue(result);
    taskWrapper = taskExecutorService.preemptionQueue.peek();
    assertNotNull(taskWrapper);
    assertTrue(taskWrapper.isInPreemptionQueue());
    // Gained it again.
    result = taskExecutorService.updateFragment(fragmentId, true);
    assertTrue(result);
    taskWrapper2 = taskExecutorService.preemptionQueue.peek();
    assertNull(taskWrapper2);
    assertFalse(taskWrapper.isInPreemptionQueue());
    // Now lost a finishable state.
    r2.setCanUpdateFinishable();
    taskWrapper.finishableStateUpdated(false);
    taskWrapper2 = taskExecutorService.preemptionQueue.peek();
    assertNotNull(taskWrapper2);
    assertTrue(taskWrapper.isInPreemptionQueue());
    r2.complete();
    r2.awaitEnd();
  } finally {
    taskExecutorService.shutDown(false);
  }
}
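The assertions above trace a single rule: a fragment sits in the preemption queue whenever it lacks either of its two protections, a finishable state or a duck (the guaranteed-execution slot granted via updateFragment). A minimal sketch of that membership predicate, under the assumption that both flags are available as plain booleans (the real TaskExecutorService tracks them inside TaskWrapper and its scheduler state):

// Sketch only, not the Hive implementation: a fragment is preemptable unless it
// is both finishable and holds a duck (guaranteed slot).
static boolean belongsInPreemptionQueue(boolean canFinish, boolean hasDuck) {
  return !(canFinish && hasDuck);
}

In the test, r1 enters the queue with neither flag, stays there after becoming finishable, and only leaves once updateFragment(fragmentId, true) grants the duck, which is what this predicate predicts.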
Use of org.apache.hadoop.hive.llap.daemon.impl.TaskExecutorService.TaskWrapper in project hive by apache.
From the class TestShortestJobFirstComparator, the method testWaitQueueComparatorParallelism:
@Test(timeout = 60000)
public void testWaitQueueComparatorParallelism() throws InterruptedException {
  // 7 pending
  TaskWrapper r1 = createTaskWrapper(createSubmitWorkRequestProto(1, 10, 3, 10, 100, 1, "q1", false), false, 100000);
  // 3 pending
  TaskWrapper r2 = createTaskWrapper(createSubmitWorkRequestProto(2, 10, 7, 10, 100, 1, "q2", false), false, 100000);
  // 5 pending
  TaskWrapper r3 = createTaskWrapper(createSubmitWorkRequestProto(3, 10, 5, 10, 100, 1, "q3", false), false, 100000);
  EvictingPriorityBlockingQueue<TaskWrapper> queue = new EvictingPriorityBlockingQueue<>(new ShortestJobFirstComparator(), 4);
  assertNull(queue.offer(r1, 0));
  assertNull(queue.offer(r2, 0));
  assertNull(queue.offer(r3, 0));
  assertEquals(r2, queue.take());
  assertEquals(r3, queue.take());
  assertEquals(r1, queue.take());
}
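The expected take() order (r2, r3, r1) follows from the pending-task counts in the comments: 3, 5, and 7 upstream tasks still outstanding, respectively. The comparator prefers the fragment whose job has the least remaining work. A rough sketch of that criterion, using only the total and completed counts passed to createSubmitWorkRequestProto (it ignores any other signals the real ShortestJobFirstComparator may weigh):

// Sketch only: fewest pending upstream tasks first.
static int bySmallestPendingWork(int totalA, int completedA, int totalB, int completedB) {
  return Integer.compare(totalA - completedA, totalB - completedB);
}
// bySmallestPendingWork(10, 7, 10, 5) < 0, so r2 (3 pending) is taken before r3 (5 pending),
// and r3 before r1 (7 pending), matching the assertions above.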
Use of org.apache.hadoop.hive.llap.daemon.impl.TaskExecutorService.TaskWrapper in project hive by apache.
From the class TestTaskExecutorService, the method testPreemptionStateOnTaskMoveToNonFinishableState:
@Test(timeout = 10000)
public void testPreemptionStateOnTaskMoveToNonFinishableState() throws InterruptedException {
  MockRequest r1 = createMockRequest(1, 1, 100, 200, true, 20000L);
  TaskExecutorServiceForTest taskExecutorService = new TaskExecutorServiceForTest(1, 2, ShortestJobFirstComparator.class.getName(), true);
  taskExecutorService.init(new Configuration());
  taskExecutorService.start();
  try {
    Scheduler.SubmissionState submissionState = taskExecutorService.schedule(r1);
    assertEquals(Scheduler.SubmissionState.ACCEPTED, submissionState);
    awaitStartAndSchedulerRun(r1, taskExecutorService);
    TaskWrapper taskWrapper = taskExecutorService.preemptionQueue.peek();
    assertNull(taskWrapper);
    assertEquals(1, taskExecutorService.knownTasks.size());
    taskWrapper = taskExecutorService.knownTasks.entrySet().iterator().next().getValue();
    assertFalse(taskWrapper.isInPreemptionQueue());
    // Now notify the executorService that the task has moved to the non-finishable state.
    taskWrapper.finishableStateUpdated(false);
    TaskWrapper taskWrapper2 = taskExecutorService.preemptionQueue.peek();
    assertNotNull(taskWrapper2);
    assertTrue(taskWrapper2.isInPreemptionQueue());
    assertEquals(taskWrapper, taskWrapper2);
    r1.complete();
    r1.awaitEnd();
  } finally {
    taskExecutorService.shutDown(false);
  }
}
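The difference from the previous test is the starting point: r1 is created finishable, so after scheduling it runs outside the preemption queue, and the finishableStateUpdated(false) callback is what pushes it in. A hedged sketch of that transition, with the queue and flag passed in explicitly as stand-ins for the real TaskExecutorService internals:

// Sketch only: a downgrade to non-finishable makes a running task preemptable;
// an upgrade back to finishable removes it again (ignoring the duck flag covered
// in the earlier test).
static void onFinishableStateUpdated(TaskWrapper wrapper, boolean canFinish,
    java.util.Queue<TaskWrapper> preemptionQueue) {
  if (!canFinish) {
    if (!preemptionQueue.contains(wrapper)) {
      preemptionQueue.add(wrapper);
    }
  } else {
    preemptionQueue.remove(wrapper);
  }
}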
Use of org.apache.hadoop.hive.llap.daemon.impl.TaskExecutorService.TaskWrapper in project hive by apache.
From the class TestFirstInFirstOutComparator, the method testWaitQueueComparatorWithinDagPriority:
@Test(timeout = 60000)
public void testWaitQueueComparatorWithinDagPriority() throws InterruptedException {
  TaskWrapper r1 = createTaskWrapper(createRequest(1, 1, 0, 100, 100, 10), false, 100000);
  TaskWrapper r2 = createTaskWrapper(createRequest(2, 1, 0, 100, 100, 1), false, 100000);
  TaskWrapper r3 = createTaskWrapper(createRequest(3, 1, 0, 100, 100, 5), false, 100000);
  EvictingPriorityBlockingQueue<TaskWrapper> queue = new EvictingPriorityBlockingQueue<>(new FirstInFirstOutComparator(), 4);
  assertNull(queue.offer(r1, 0));
  assertNull(queue.offer(r2, 0));
  assertNull(queue.offer(r3, 0));
  assertEquals(r2, queue.take());
  assertEquals(r3, queue.take());
  assertEquals(r1, queue.take());
}
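All three requests here belong to the same DAG and share every field except the within-DAG priority (the last argument to createRequest): 10, 1, and 5. The take() order r2, r3, r1 shows that a lower priority value is served first. A sketch of just that tie-break, not the full FirstInFirstOutComparator logic:

// Sketch only: among otherwise-identical requests in one DAG, the lower
// within-DAG priority value wins.
static int byWithinDagPriority(int priorityA, int priorityB) {
  return Integer.compare(priorityA, priorityB); // r2 (1) before r3 (5) before r1 (10)
}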
Use of org.apache.hadoop.hive.llap.daemon.impl.TaskExecutorService.TaskWrapper in project hive by apache.
From the class TestShortestJobFirstComparator, the method testWaitQueueComparatorWithinSameDagPriority:
@Test(timeout = 60000)
public void testWaitQueueComparatorWithinSameDagPriority() throws InterruptedException {
  TaskWrapper r1 = createTaskWrapper(createSubmitWorkRequestProto(1, 1, 0, 10, 100, 10), true, 100000);
  TaskWrapper r2 = createTaskWrapper(createSubmitWorkRequestProto(2, 1, 0, 10, 100, 10), true, 100000);
  TaskWrapper r3 = createTaskWrapper(createSubmitWorkRequestProto(3, 1, 0, 10, 100, 10), true, 100000);
  EvictingPriorityBlockingQueue<TaskWrapper> queue = new EvictingPriorityBlockingQueue<>(new ShortestJobFirstComparator(), 3);
  assertNull(queue.offer(r1, 0));
  assertNull(queue.offer(r2, 0));
  assertNull(queue.offer(r3, 0));
  // Cannot queue more requests, as the queue is full.
  TaskWrapper r4 = createTaskWrapper(createSubmitWorkRequestProto(4, 1, 0, 10, 100, 10), true, 100000);
  assertEquals(r4, queue.offer(r4, 0));
}
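These offers also document the EvictingPriorityBlockingQueue.offer contract as the tests exercise it: null means the element was accepted, and a non-null return is the element that could not be placed, here r4 itself, because the queue is already at capacity 3 and r4 does not outrank any waiting task. A short usage sketch of handling that return value, assuming the same queue and wrappers as above:

// Handle the result of offer(): null on acceptance, otherwise the element that lost out.
TaskWrapper displaced = queue.offer(r4, 0);
if (displaced == null) {
  // r4 was queued.
} else if (displaced == r4) {
  // Queue full and r4 did not outrank any waiting task; the submission is rejected.
} else {
  // Some other wrapper was displaced to make room for r4 and must be handled by the caller.
}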