Use of io.prestosql.snapshot.SnapshotUtils in the hetu-core project by openLooKeng.
Source: class TestOrderByOperator, method testCaptureRestoreWithSpillToHdfsEnabled.
/**
* This test is supposed to consume 4 pages and produce the output page with sorted ordering.
* The spilling and capturing('capture1') happened after the first 2 pages added into the operator.
* The operator is rescheduled after 4 pages added (but before finish() is called).
*
* @throws Exception
*/
@Test
public void testCaptureRestoreWithSpillToHdfsEnabled() throws Exception {
// Initialization
Path spillPath = Paths.get("/tmp/hetu/snapshot/");
HetuHdfsFileSystemClient fs = getLocalHdfs();
when(fileSystemClientManager.getFileSystemClient(any(String.class), any(Path.class))).thenReturn(fs);
GenericSpillerFactory genericSpillerFactory = createGenericSpillerFactory(spillPath, fileSystemClientManager, true, "hdfs");
SnapshotConfig snapshotConfig = new SnapshotConfig();
snapshotConfig.setSpillProfile("hdfs");
snapshotConfig.setSpillToHdfs(true);
snapshotUtils = new SnapshotUtils(fileSystemClientManager, snapshotConfig, new InMemoryNodeManager());
snapshotUtils.initialize();
List<Page> input1 = rowPagesBuilder(VARCHAR, BIGINT).row("a", 1L).row("b", 2L).pageBreak().row("b", 3L).row("a", 4L).build();
List<Page> input2 = rowPagesBuilder(VARCHAR, BIGINT).row("c", 4L).row("d", 6L).pageBreak().row("c", 2L).row("d", 3L).build();
OrderByOperatorFactory operatorFactory = new OrderByOperatorFactory(0, new PlanNodeId("test"), ImmutableList.of(VARCHAR, BIGINT), ImmutableList.of(0, 1), 10, ImmutableList.of(0, 1), ImmutableList.of(ASC_NULLS_LAST, DESC_NULLS_LAST), new PagesIndex.TestingFactory(false), true, Optional.of(genericSpillerFactory), new OrderingCompiler(), false);
DriverContext driverContext = createDriverContext(defaultMemoryLimit, TEST_SNAPSHOT_SESSION);
driverContext.getPipelineContext().getTaskContext().getSnapshotManager().setTotalComponents(1);
OrderByOperator orderByOperator = (OrderByOperator) operatorFactory.createOperator(driverContext);
// Step1: add the first 2 pages
for (Page page : input1) {
orderByOperator.addInput(page);
}
// Step2: spilling happened here
getFutureValue(orderByOperator.startMemoryRevoke());
orderByOperator.finishMemoryRevoke();
// Step3: add a marker page to make 'capture1' happened
MarkerPage marker = MarkerPage.snapshotPage(1);
orderByOperator.addInput(marker);
// Step4: add another 2 pages
for (Page page : input2) {
orderByOperator.addInput(page);
}
// Step5: assume the task is rescheduled due to failure and everything is re-constructed
driverContext = createDriverContext(defaultMemoryLimit, TEST_SNAPSHOT_SESSION);
operatorFactory = new OrderByOperatorFactory(0, new PlanNodeId("test"), ImmutableList.of(VARCHAR, BIGINT), ImmutableList.of(0, 1), 10, ImmutableList.of(0, 1), ImmutableList.of(ASC_NULLS_LAST, DESC_NULLS_LAST), new PagesIndex.TestingFactory(false), true, Optional.of(genericSpillerFactory), new OrderingCompiler(), false);
orderByOperator = (OrderByOperator) operatorFactory.createOperator(driverContext);
// Step6: restore to 'capture1', the spiller should contains the reference of the first 2 pages for now.
MarkerPage resumeMarker = MarkerPage.resumePage(1);
orderByOperator.addInput(resumeMarker);
// Step7: continue to add another 2 pages
for (Page page : input2) {
orderByOperator.addInput(page);
}
orderByOperator.finish();
// Compare the results
MaterializedResult expected = resultBuilder(driverContext.getSession(), VARCHAR, BIGINT).row("a", 4L).row("a", 1L).row("b", 3L).row("b", 2L).row("c", 4L).row("c", 2L).row("d", 6L).row("d", 3L).build();
ImmutableList.Builder<Page> outputPages = ImmutableList.builder();
Page p = orderByOperator.getOutput();
while (p instanceof MarkerPage) {
p = orderByOperator.getOutput();
}
outputPages.add(p);
MaterializedResult actual = toMaterializedResult(driverContext.getSession(), expected.getTypes(), outputPages.build());
Assert.assertEquals(actual, expected);
}
Use of io.prestosql.snapshot.SnapshotUtils in the hetu-core project by openLooKeng.
Source: class TestOrderByOperator, method testCaptureRestoreWithSpill.
/**
* This test is supposed to consume 4 pages and produce the output page with sorted ordering.
* The spilling and capturing('capture1') happened after the first 2 pages added into the operator.
* The operator is rescheduled after 4 pages added (but before finish() is called).
*
* @throws Exception
*/
@Test
public void testCaptureRestoreWithSpill() throws Exception {
// Initialization
Path spillPath = Paths.get("/tmp/hetu/snapshot/");
GenericSpillerFactory genericSpillerFactory = createGenericSpillerFactory(spillPath, fileSystemClientManager, false, null);
SnapshotConfig snapshotConfig = new SnapshotConfig();
snapshotUtils = new SnapshotUtils(fileSystemClientManager, snapshotConfig, new InMemoryNodeManager());
snapshotUtils.initialize();
List<Page> input1 = rowPagesBuilder(VARCHAR, BIGINT).row("a", 1L).row("b", 2L).pageBreak().row("b", 3L).row("a", 4L).build();
List<Page> input2 = rowPagesBuilder(VARCHAR, BIGINT).row("c", 4L).row("d", 6L).pageBreak().row("c", 2L).row("d", 3L).build();
OrderByOperatorFactory operatorFactory = new OrderByOperatorFactory(0, new PlanNodeId("test"), ImmutableList.of(VARCHAR, BIGINT), ImmutableList.of(0, 1), 10, ImmutableList.of(0, 1), ImmutableList.of(ASC_NULLS_LAST, DESC_NULLS_LAST), new PagesIndex.TestingFactory(false), true, Optional.of(genericSpillerFactory), new OrderingCompiler(), false);
DriverContext driverContext = createDriverContext(defaultMemoryLimit, TEST_SNAPSHOT_SESSION);
driverContext.getPipelineContext().getTaskContext().getSnapshotManager().setTotalComponents(1);
OrderByOperator orderByOperator = (OrderByOperator) operatorFactory.createOperator(driverContext);
// Step1: add the first 2 pages
for (Page page : input1) {
orderByOperator.addInput(page);
}
// Step2: spilling happened here
getFutureValue(orderByOperator.startMemoryRevoke());
orderByOperator.finishMemoryRevoke();
// Step3: add a marker page to make 'capture1' happened
MarkerPage marker = MarkerPage.snapshotPage(1);
orderByOperator.addInput(marker);
// Step4: add another 2 pages
for (Page page : input2) {
orderByOperator.addInput(page);
}
// Step5: assume the task is rescheduled due to failure and everything is re-constructed
driverContext = createDriverContext(defaultMemoryLimit, TEST_SNAPSHOT_SESSION);
operatorFactory = new OrderByOperatorFactory(0, new PlanNodeId("test"), ImmutableList.of(VARCHAR, BIGINT), ImmutableList.of(0, 1), 10, ImmutableList.of(0, 1), ImmutableList.of(ASC_NULLS_LAST, DESC_NULLS_LAST), new PagesIndex.TestingFactory(false), true, Optional.of(genericSpillerFactory), new OrderingCompiler(), false);
orderByOperator = (OrderByOperator) operatorFactory.createOperator(driverContext);
// Step6: restore to 'capture1', the spiller should contains the reference of the first 2 pages for now.
MarkerPage resumeMarker = MarkerPage.resumePage(1);
orderByOperator.addInput(resumeMarker);
// Step7: continue to add another 2 pages
for (Page page : input2) {
orderByOperator.addInput(page);
}
orderByOperator.finish();
// Compare the results
MaterializedResult expected = resultBuilder(driverContext.getSession(), VARCHAR, BIGINT).row("a", 4L).row("a", 1L).row("b", 3L).row("b", 2L).row("c", 4L).row("c", 2L).row("d", 6L).row("d", 3L).build();
ImmutableList.Builder<Page> outputPages = ImmutableList.builder();
Page p = orderByOperator.getOutput();
while (p instanceof MarkerPage) {
p = orderByOperator.getOutput();
}
outputPages.add(p);
MaterializedResult actual = toMaterializedResult(driverContext.getSession(), expected.getTypes(), outputPages.build());
Assert.assertEquals(actual, expected);
}
Use of io.prestosql.snapshot.SnapshotUtils in the hetu-core project by openLooKeng.
Source: class TestPartitionedOutputOperator, method testPartitionedOutputOperatorSnapshot.
@Test
public void testPartitionedOutputOperatorSnapshot() throws Exception {
SnapshotUtils snapshotUtils = mock(SnapshotUtils.class);
PartitionedOutputBuffer buffer = mock(PartitionedOutputBuffer.class);
PartitionedOutputOperator operator = createPartitionedOutputOperator(snapshotUtils, buffer);
operator.getOperatorContext().getDriverContext().getPipelineContext().getTaskContext().getSnapshotManager().setTotalComponents(1);
List<Page> input = rowPagesBuilder(BIGINT).addSequencePage(1, 1).addSequencePage(2, 4).build();
MarkerPage marker = MarkerPage.snapshotPage(1);
MarkerPage marker2 = MarkerPage.snapshotPage(2);
MarkerPage marker3 = MarkerPage.snapshotPage(3);
MarkerPage resume = MarkerPage.resumePage(1);
// Add first page, then capture and compare, then add second page, then restore, then compare, then add second page, then finish, then compare
operator.addInput(input.get(0));
operator.addInput(marker);
ArgumentCaptor<Object> stateArgument = ArgumentCaptor.forClass(Object.class);
verify(snapshotUtils, times(1)).storeState(anyObject(), stateArgument.capture(), anyObject());
Object snapshot = stateArgument.getValue();
when(snapshotUtils.loadState(anyObject(), anyObject())).thenReturn(Optional.of(snapshot));
operator.addInput(input.get(1));
operator.addInput(resume);
operator.addInput(marker2);
verify(snapshotUtils, times(2)).storeState(anyObject(), stateArgument.capture(), anyObject());
snapshot = stateArgument.getValue();
Object snapshotEntry = ((Map<String, Object>) snapshot).get("query/2/1/1/0/0/0");
assertEquals(SnapshotTestUtil.toFullSnapshotMapping(snapshotEntry), createExpectedMappingBeforeFinish());
operator.addInput(input.get(1));
operator.finish();
operator.addInput(marker3);
verify(snapshotUtils, times(3)).storeState(anyObject(), stateArgument.capture(), anyObject());
snapshot = stateArgument.getValue();
snapshotEntry = ((Map<String, Object>) snapshot).get("query/3/1/1/0/0/0");
assertEquals(SnapshotTestUtil.toFullSnapshotMapping(snapshotEntry), createExpectedMappingAfterFinish());
ArgumentCaptor<List> pagesArgument = ArgumentCaptor.forClass(List.class);
verify(buffer, times(9)).enqueue(anyInt(), pagesArgument.capture(), anyString());
List<List> pages = pagesArgument.getAllValues();
// 9 pages:
// 1 (page 1 partitioned)
// 1 (marker 1)
// 2 (page 2 before resume)
// 1 (resume marker)
// 1 (marker 2)
// 2 (page 2 after resume)
// 1 (marker 3)
assertEquals(pages.size(), 9);
assertTrue(((SerializedPage) pages.get(1).get(0)).isMarkerPage());
assertTrue(((SerializedPage) pages.get(4).get(0)).isMarkerPage());
assertTrue(((SerializedPage) pages.get(5).get(0)).isMarkerPage());
assertTrue(((SerializedPage) pages.get(8).get(0)).isMarkerPage());
}
Use of io.prestosql.snapshot.SnapshotUtils in the hetu-core project by openLooKeng.
Source: class TestWindowOperator, method testCaptureRestoreWithoutSpill.
@Test
public void testCaptureRestoreWithoutSpill() {
SnapshotConfig snapshotConfig = new SnapshotConfig();
snapshotUtils = new SnapshotUtils(fileSystemClientManager, snapshotConfig, new InMemoryNodeManager());
snapshotUtils.initialize();
ImmutableList.Builder<Page> outputPages = ImmutableList.builder();
List<Page> input1 = rowPagesBuilder(VARCHAR, BIGINT, DOUBLE, BOOLEAN).row("b", -1L, -0.1, true).row("a", 2L, 0.3, false).row("a", 4L, 0.2, true).pageBreak().row("b", 5L, 0.4, false).row("a", 6L, 0.1, true).build();
List<Page> input2 = rowPagesBuilder(VARCHAR, BIGINT, DOUBLE, BOOLEAN).row("c", -1L, -0.1, true).row("d", 2L, 0.3, false).row("c", 4L, 0.2, true).pageBreak().row("d", 5L, 0.4, false).build();
WindowOperatorFactory operatorFactory = createFactoryUnbounded(ImmutableList.of(VARCHAR, BIGINT, DOUBLE, BOOLEAN), Ints.asList(0, 1, 2, 3), ROW_NUMBER, Ints.asList(0), Ints.asList(1), ImmutableList.copyOf(new SortOrder[] { SortOrder.ASC_NULLS_LAST }), false);
DriverContext driverContext = createDriverContext(defaultMemoryLimit, TEST_SNAPSHOT_SESSION);
WindowOperator windowOperator = (WindowOperator) operatorFactory.createOperator(driverContext);
// Step1: add the first 2 pages
for (Page page : input1) {
windowOperator.addInput(page);
windowOperator.getOutput();
}
// Step2: add a marker page to make 'capture1' happened
MarkerPage marker = MarkerPage.snapshotPage(1);
windowOperator.addInput(marker);
windowOperator.getOutput();
// Step3: add another 2 pages
for (Page page : input2) {
windowOperator.addInput(page);
windowOperator.getOutput();
}
// Step4: assume the task is rescheduled due to failure and everything is re-constructed
driverContext = createDriverContext(8, TEST_SNAPSHOT_SESSION);
operatorFactory = createFactoryUnbounded(ImmutableList.of(VARCHAR, BIGINT, DOUBLE, BOOLEAN), Ints.asList(0, 1, 2, 3), ROW_NUMBER, Ints.asList(0), Ints.asList(1), ImmutableList.copyOf(new SortOrder[] { SortOrder.ASC_NULLS_LAST }), false);
windowOperator = (WindowOperator) operatorFactory.createOperator(driverContext);
// Step5: restore to 'capture1'
MarkerPage resumeMarker = MarkerPage.resumePage(1);
windowOperator.addInput(resumeMarker);
windowOperator.getOutput();
// Step6: continue to add another 2 pages
for (Page page : input2) {
windowOperator.addInput(page);
windowOperator.getOutput();
}
windowOperator.finish();
// Compare the results
MaterializedResult expected = resultBuilder(driverContext.getSession(), VARCHAR, BIGINT, DOUBLE, BOOLEAN, BIGINT).row("a", 2L, 0.3, false, 1L).row("a", 4L, 0.2, true, 2L).row("a", 6L, 0.1, true, 3L).row("b", -1L, -0.1, true, 1L).row("b", 5L, 0.4, false, 2L).row("c", -1L, -0.1, true, 1L).row("c", 4L, 0.2, true, 2L).row("d", 2L, 0.3, false, 1L).row("d", 5L, 0.4, false, 2L).build();
Page p = windowOperator.getOutput();
while (p == null) {
p = windowOperator.getOutput();
}
outputPages.add(p);
MaterializedResult actual = toMaterializedResult(driverContext.getSession(), expected.getTypes(), outputPages.build());
Assert.assertEquals(actual, expected);
}
Use of io.prestosql.snapshot.SnapshotUtils in the hetu-core project by openLooKeng.
Source: class TestWindowOperator, method testCaptureRestoreWithSpillToHdfsEnabled.
@Test
public void testCaptureRestoreWithSpillToHdfsEnabled() throws Exception {
// Initialization
Path spillPath = Paths.get("/tmp/hetu/snapshot/");
HetuHdfsFileSystemClient fs = getLocalHdfs();
when(fileSystemClientManager.getFileSystemClient(any(String.class), any(Path.class))).thenReturn(fs);
GenericSpillerFactory genericSpillerFactory = createGenericSpillerFactory(spillPath, fileSystemClientManager, false, null);
SnapshotConfig snapshotConfig = new SnapshotConfig();
snapshotConfig.setSpillProfile("hdfs");
snapshotConfig.setSpillToHdfs(true);
snapshotUtils = new SnapshotUtils(fileSystemClientManager, snapshotConfig, new InMemoryNodeManager());
snapshotUtils.initialize();
ImmutableList.Builder<Page> outputPages = ImmutableList.builder();
List<Page> input1 = rowPagesBuilder(VARCHAR, BIGINT, DOUBLE, BOOLEAN).row("b", -1L, -0.1, true).row("a", 2L, 0.3, false).row("a", 4L, 0.2, true).pageBreak().row("b", 5L, 0.4, false).row("a", 6L, 0.1, true).build();
List<Page> input2 = rowPagesBuilder(VARCHAR, BIGINT, DOUBLE, BOOLEAN).row("c", -1L, -0.1, true).row("d", 2L, 0.3, false).row("c", 4L, 0.2, true).pageBreak().row("d", 5L, 0.4, false).build();
WindowOperatorFactory operatorFactory = new WindowOperatorFactory(0, new PlanNodeId("test"), ImmutableList.of(VARCHAR, BIGINT, DOUBLE, BOOLEAN), Ints.asList(0, 1, 2, 3), ROW_NUMBER, Ints.asList(0), ImmutableList.of(), Ints.asList(1), ImmutableList.copyOf(new SortOrder[] { SortOrder.ASC_NULLS_LAST }), 0, 10, new PagesIndex.TestingFactory(false), true, genericSpillerFactory, new OrderingCompiler());
DriverContext driverContext = createDriverContext(defaultMemoryLimit, TEST_SNAPSHOT_SESSION);
WindowOperator windowOperator = (WindowOperator) operatorFactory.createOperator(driverContext);
// Step1: add the first 2 pages
for (Page page : input1) {
windowOperator.addInput(page);
windowOperator.getOutput();
}
// Step2: spilling happened here
getFutureValue(windowOperator.startMemoryRevoke());
windowOperator.finishMemoryRevoke();
// Step3: add a marker page to make 'capture1' happened
MarkerPage marker = MarkerPage.snapshotPage(1);
windowOperator.addInput(marker);
windowOperator.getOutput();
// Step4: add another 2 pages
for (Page page : input2) {
windowOperator.addInput(page);
windowOperator.getOutput();
}
// Step5: assume the task is rescheduled due to failure and everything is re-constructed
driverContext = createDriverContext(8, TEST_SNAPSHOT_SESSION);
operatorFactory = new WindowOperatorFactory(0, new PlanNodeId("test"), ImmutableList.of(VARCHAR, BIGINT, DOUBLE, BOOLEAN), Ints.asList(0, 1, 2, 3), ROW_NUMBER, Ints.asList(0), ImmutableList.of(), Ints.asList(1), ImmutableList.copyOf(new SortOrder[] { SortOrder.ASC_NULLS_LAST }), 0, 10, new PagesIndex.TestingFactory(false), true, genericSpillerFactory, new OrderingCompiler());
windowOperator = (WindowOperator) operatorFactory.createOperator(driverContext);
// Step6: restore to 'capture1', the spiller should contains the reference of the first 2 pages for now.
MarkerPage resumeMarker = MarkerPage.resumePage(1);
windowOperator.addInput(resumeMarker);
windowOperator.getOutput();
// Step7: continue to add another 2 pages
for (Page page : input2) {
windowOperator.addInput(page);
windowOperator.getOutput();
}
windowOperator.finish();
// Compare the results
MaterializedResult expected = resultBuilder(driverContext.getSession(), VARCHAR, BIGINT, DOUBLE, BOOLEAN, BIGINT).row("a", 2L, 0.3, false, 1L).row("a", 4L, 0.2, true, 2L).row("a", 6L, 0.1, true, 3L).row("b", -1L, -0.1, true, 1L).row("b", 5L, 0.4, false, 2L).row("c", -1L, -0.1, true, 1L).row("c", 4L, 0.2, true, 2L).row("d", 2L, 0.3, false, 1L).row("d", 5L, 0.4, false, 2L).build();
Page p = windowOperator.getOutput();
while (p == null) {
p = windowOperator.getOutput();
}
outputPages.add(p);
MaterializedResult actual = toMaterializedResult(driverContext.getSession(), expected.getTypes(), outputPages.build());
Assert.assertEquals(actual, expected);
}
Aggregations