Use of org.apache.tez.runtime.api.InputInitializerContext in project hive by apache, from the class TestDynamicPartitionPruner, method testSingleSourceMultipleFiltersOrdering1.
@Test(timeout = 5000)
public void testSingleSourceMultipleFiltersOrdering1() throws InterruptedException, SerDeException {
  InputInitializerContext mockInitContext = mock(InputInitializerContext.class);
  doReturn(2).when(mockInitContext).getVertexNumTasks("v1");
  MapWork mapWork = createMockMapWork(new TestSource("v1", 2));
  DynamicPartitionPruner pruner =
      new DynamicPartitionPrunerForEventTesting(mockInitContext, mapWork);
  PruneRunnable pruneRunnable = new PruneRunnable(pruner);
  Thread t = new Thread(pruneRunnable);
  t.start();
  try {
    pruneRunnable.start();
    InputInitializerEvent event =
        InputInitializerEvent.create("FakeTarget", "TargetInput", ByteBuffer.allocate(0));
    event.setSourceVertexName("v1");
    pruner.addEvent(event);
    pruner.addEvent(event);
    pruner.addEvent(event);
    pruner.addEvent(event);
    pruner.processVertex("v1");
    pruneRunnable.awaitEnd();
    assertFalse(pruneRunnable.inError.get());
  } finally {
    t.interrupt();
    t.join();
  }
}
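Each snippet drives the pruner from a PruneRunnable helper that is referenced but not shown here. Below is a minimal sketch of a helper in that spirit; it assumes DynamicPartitionPruner exposes a blocking prune() method, and it is an illustration rather than the Hive test's actual inner class.

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;

// Illustration only: a helper in the spirit of the PruneRunnable used above.
// Assumes DynamicPartitionPruner exposes a blocking prune() call that returns
// once every expected event has arrived and every source vertex is processed.
static class PruneRunnableSketch implements Runnable {
  private final DynamicPartitionPruner pruner;
  private final CountDownLatch startLatch = new CountDownLatch(1);
  private final CountDownLatch endLatch = new CountDownLatch(1);
  final AtomicBoolean inError = new AtomicBoolean(false);

  PruneRunnableSketch(DynamicPartitionPruner pruner) {
    this.pruner = pruner;
  }

  void start() {
    startLatch.countDown();   // release run(), which is waiting on another thread
  }

  void awaitEnd() throws InterruptedException {
    endLatch.await();         // block the test until prune() has finished
  }

  @Override
  public void run() {
    try {
      startLatch.await();
      pruner.prune();         // blocks until addEvent/processVertex calls complete the picture
    } catch (Exception e) {
      inError.set(true);      // surfaced to the test via assertFalse(pruneRunnable.inError.get())
    } finally {
      endLatch.countDown();
    }
  }
}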
Use of org.apache.tez.runtime.api.InputInitializerContext in project hive by apache, from the class TestDynamicPartitionPruner, method testSingleSourceOrdering2.
@Test(timeout = 5000)
public void testSingleSourceOrdering2() throws InterruptedException, IOException, HiveException, SerDeException {
  InputInitializerContext mockInitContext = mock(InputInitializerContext.class);
  doReturn(1).when(mockInitContext).getVertexNumTasks("v1");
  MapWork mapWork = createMockMapWork(new TestSource("v1", 1));
  DynamicPartitionPruner pruner =
      new DynamicPartitionPrunerForEventTesting(mockInitContext, mapWork);
  PruneRunnable pruneRunnable = new PruneRunnable(pruner);
  Thread t = new Thread(pruneRunnable);
  t.start();
  try {
    pruneRunnable.start();
    InputInitializerEvent event =
        InputInitializerEvent.create("FakeTarget", "TargetInput", ByteBuffer.allocate(0));
    event.setSourceVertexName("v1");
    pruner.processVertex("v1");
    pruner.addEvent(event);
    pruneRunnable.awaitEnd();
    assertFalse(pruneRunnable.inError.get());
  } finally {
    t.interrupt();
    t.join();
  }
}
Use of org.apache.tez.runtime.api.InputInitializerContext in project hive by apache, from the class TestDynamicPartitionPruner, method testMultipleSourcesOrdering1.
@Test(timeout = 5000)
public void testMultipleSourcesOrdering1() throws InterruptedException, SerDeException {
  InputInitializerContext mockInitContext = mock(InputInitializerContext.class);
  doReturn(2).when(mockInitContext).getVertexNumTasks("v1");
  doReturn(3).when(mockInitContext).getVertexNumTasks("v2");
  MapWork mapWork = createMockMapWork(new TestSource("v1", 2), new TestSource("v2", 1));
  DynamicPartitionPruner pruner =
      new DynamicPartitionPrunerForEventTesting(mockInitContext, mapWork);
  PruneRunnable pruneRunnable = new PruneRunnable(pruner);
  Thread t = new Thread(pruneRunnable);
  t.start();
  try {
    pruneRunnable.start();
    InputInitializerEvent eventV1 =
        InputInitializerEvent.create("FakeTarget", "TargetInput", ByteBuffer.allocate(0));
    eventV1.setSourceVertexName("v1");
    InputInitializerEvent eventV2 =
        InputInitializerEvent.create("FakeTarget", "TargetInput", ByteBuffer.allocate(0));
    eventV2.setSourceVertexName("v2");
    // 2 X 2 events for V1. 3 X 1 events for V2
    pruner.addEvent(eventV1);
    pruner.addEvent(eventV1);
    pruner.addEvent(eventV1);
    pruner.addEvent(eventV1);
    pruner.addEvent(eventV2);
    pruner.addEvent(eventV2);
    pruner.addEvent(eventV2);
    pruner.processVertex("v1");
    pruner.processVertex("v2");
    pruneRunnable.awaitEnd();
    assertFalse(pruneRunnable.inError.get());
  } finally {
    t.interrupt();
    t.join();
  }
}
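The createMockMapWork(...) helper and its TestSource argument are also only referenced in these snippets. From the calls above, TestSource appears to pair a source vertex name with the number of pruning filters that source contributes, which is what makes the event counts in the inline comment add up (2 tasks x 2 filters = 4 events for v1, 3 tasks x 1 filter = 3 events for v2). A hypothetical value holder in that spirit follows; names and fields are assumptions, not the Hive test's actual class.

// Hypothetical stand-in for the TestSource helper referenced above: a source
// vertex name plus the number of pruning filters that source contributes.
// The tests expect getVertexNumTasks(vertexName) * numFilters events from a
// source before processVertex(vertexName) can complete it.
static class TestSourceSketch {
  final String vertexName;
  final int numFilters;

  TestSourceSketch(String vertexName, int numFilters) {
    this.vertexName = vertexName;
    this.numFilters = numFilters;
  }

  int expectedEvents(int vertexNumTasks) {
    return vertexNumTasks * numFilters;  // e.g. v1: 2 x 2 = 4, v2: 3 x 1 = 3
  }
}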
Use of org.apache.tez.runtime.api.InputInitializerContext in project hive by apache, from the class TestDynamicPartitionPruner, method testExtraEvents.
@Test(timeout = 5000, expected = IllegalStateException.class)
public void testExtraEvents() throws InterruptedException, IOException, HiveException, SerDeException {
  InputInitializerContext mockInitContext = mock(InputInitializerContext.class);
  doReturn(1).when(mockInitContext).getVertexNumTasks("v1");
  MapWork mapWork = createMockMapWork(new TestSource("v1", 1));
  DynamicPartitionPruner pruner =
      new DynamicPartitionPrunerForEventTesting(mockInitContext, mapWork);
  PruneRunnable pruneRunnable = new PruneRunnable(pruner);
  Thread t = new Thread(pruneRunnable);
  t.start();
  try {
    pruneRunnable.start();
    InputInitializerEvent event =
        InputInitializerEvent.create("FakeTarget", "TargetInput", ByteBuffer.allocate(0));
    event.setSourceVertexName("v1");
    pruner.addEvent(event);
    pruner.addEvent(event);
    pruner.processVertex("v1");
    pruneRunnable.awaitEnd();
    assertFalse(pruneRunnable.inError.get());
  } finally {
    t.interrupt();
    t.join();
  }
}
Use of org.apache.tez.runtime.api.InputInitializerContext in project hive by apache, from the class TestDynamicPartitionPruner, method testMultipleSourcesOrdering3.
@Test(timeout = 5000)
public void testMultipleSourcesOrdering3() throws InterruptedException, SerDeException {
  InputInitializerContext mockInitContext = mock(InputInitializerContext.class);
  doReturn(2).when(mockInitContext).getVertexNumTasks("v1");
  doReturn(3).when(mockInitContext).getVertexNumTasks("v2");
  MapWork mapWork = createMockMapWork(new TestSource("v1", 2), new TestSource("v2", 1));
  DynamicPartitionPruner pruner =
      new DynamicPartitionPrunerForEventTesting(mockInitContext, mapWork);
  PruneRunnable pruneRunnable = new PruneRunnable(pruner);
  Thread t = new Thread(pruneRunnable);
  t.start();
  try {
    pruneRunnable.start();
    InputInitializerEvent eventV1 =
        InputInitializerEvent.create("FakeTarget", "TargetInput", ByteBuffer.allocate(0));
    eventV1.setSourceVertexName("v1");
    InputInitializerEvent eventV2 =
        InputInitializerEvent.create("FakeTarget", "TargetInput", ByteBuffer.allocate(0));
    eventV2.setSourceVertexName("v2");
    // 2 X 2 events for V1. 3 X 1 events for V2
    pruner.addEvent(eventV1);
    pruner.addEvent(eventV1);
    pruner.processVertex("v1");
    pruner.addEvent(eventV1);
    pruner.addEvent(eventV1);
    pruner.addEvent(eventV2);
    pruner.processVertex("v2");
    pruner.addEvent(eventV2);
    pruner.addEvent(eventV2);
    pruneRunnable.awaitEnd();
    assertFalse(pruneRunnable.inError.get());
  } finally {
    t.interrupt();
    t.join();
  }
}
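Taken together, the snippets exercise one contract: addEvent(...) is called once per InputInitializerEvent received from a source vertex, processVertex(...) marks that source as finished, and the two may interleave in either order. Below is a condensed sketch of that lifecycle, reusing only the calls shown above; the pruner is assumed to be constructed as in the tests.

import java.nio.ByteBuffer;
import org.apache.hadoop.hive.ql.exec.tez.DynamicPartitionPruner;
import org.apache.tez.runtime.api.events.InputInitializerEvent;

// Condensed form of the event lifecycle exercised above (illustration only).
static void feedSource(DynamicPartitionPruner pruner, String sourceVertex, int numEvents)
    throws Exception {
  for (int i = 0; i < numEvents; i++) {
    InputInitializerEvent event =
        InputInitializerEvent.create("FakeTarget", "TargetInput", ByteBuffer.allocate(0));
    event.setSourceVertexName(sourceVertex);
    pruner.addEvent(event);           // one call per event delivered by the source vertex
  }
  pruner.processVertex(sourceVertex); // the source has finished generating events; this may
                                      // also happen before some or all events, as the tests show
}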