Use of io.prestosql.spi.block.SortOrder in project hetu-core by openlookeng.
The class TestWindowOperator, method testRowNumber:
@Test(dataProvider = "spillEnabled")
public void testRowNumber(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit) {
    List<Page> input = rowPagesBuilder(BIGINT, DOUBLE)
            .row(2L, 0.3).row(4L, 0.2).row(6L, 0.1).pageBreak()
            .row(-1L, -0.1).row(5L, 0.4).build();
    WindowOperatorFactory operatorFactory = createFactoryUnbounded(
            ImmutableList.of(BIGINT, DOUBLE), Ints.asList(1, 0), ROW_NUMBER, Ints.asList(), Ints.asList(0),
            ImmutableList.copyOf(new SortOrder[] {SortOrder.ASC_NULLS_LAST}), spillEnabled);
    DriverContext driverContext = createDriverContext(memoryLimit, TEST_SESSION);
    MaterializedResult expected = resultBuilder(driverContext.getSession(), DOUBLE, BIGINT, BIGINT)
            .row(-0.1, -1L, 1L).row(0.3, 2L, 2L).row(0.2, 4L, 3L).row(0.4, 5L, 4L).row(0.1, 6L, 5L).build();
    assertOperatorEquals(operatorFactory, driverContext, input, expected, revokeMemoryWhenAddingPages);
}
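Here the expected output is ordered by the BIGINT column ascending (-1, 2, 4, 5, 6) with row numbers 1 through 5, which is exactly what SortOrder.ASC_NULLS_LAST requests for that channel. A minimal, self-contained sketch of the ASC_NULLS_LAST ordering semantics in plain Java (illustrative only, not the SPI's own comparator):

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public class AscNullsLastSketch {
    public static void main(String[] args) {
        // ASC_NULLS_LAST: ascending order, with nulls placed after every non-null value
        Comparator<Long> ascNullsLast = Comparator.nullsLast(Comparator.<Long>naturalOrder());
        List<Long> keys = Arrays.asList(2L, 4L, 6L, null, -1L, 5L);
        keys.sort(ascNullsLast);
        System.out.println(keys); // [-1, 2, 4, 5, 6, null]
    }
}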
Use of io.prestosql.spi.block.SortOrder in project hetu-core by openlookeng.
The class TestWindowOperator, method testCaptureRestoreWithSpillToHdfsEnabled:
@Test
public void testCaptureRestoreWithSpillToHdfsEnabled() throws Exception {
    // Initialization
    Path spillPath = Paths.get("/tmp/hetu/snapshot/");
    HetuHdfsFileSystemClient fs = getLocalHdfs();
    when(fileSystemClientManager.getFileSystemClient(any(String.class), any(Path.class))).thenReturn(fs);
    GenericSpillerFactory genericSpillerFactory = createGenericSpillerFactory(spillPath, fileSystemClientManager, false, null);
    SnapshotConfig snapshotConfig = new SnapshotConfig();
    snapshotConfig.setSpillProfile("hdfs");
    snapshotConfig.setSpillToHdfs(true);
    snapshotUtils = new SnapshotUtils(fileSystemClientManager, snapshotConfig, new InMemoryNodeManager());
    snapshotUtils.initialize();
    ImmutableList.Builder<Page> outputPages = ImmutableList.builder();
    List<Page> input1 = rowPagesBuilder(VARCHAR, BIGINT, DOUBLE, BOOLEAN)
            .row("b", -1L, -0.1, true).row("a", 2L, 0.3, false).row("a", 4L, 0.2, true).pageBreak()
            .row("b", 5L, 0.4, false).row("a", 6L, 0.1, true).build();
    List<Page> input2 = rowPagesBuilder(VARCHAR, BIGINT, DOUBLE, BOOLEAN)
            .row("c", -1L, -0.1, true).row("d", 2L, 0.3, false).row("c", 4L, 0.2, true).pageBreak()
            .row("d", 5L, 0.4, false).build();
    WindowOperatorFactory operatorFactory = new WindowOperatorFactory(
            0, new PlanNodeId("test"), ImmutableList.of(VARCHAR, BIGINT, DOUBLE, BOOLEAN), Ints.asList(0, 1, 2, 3),
            ROW_NUMBER, Ints.asList(0), ImmutableList.of(), Ints.asList(1),
            ImmutableList.copyOf(new SortOrder[] {SortOrder.ASC_NULLS_LAST}),
            0, 10, new PagesIndex.TestingFactory(false), true, genericSpillerFactory, new OrderingCompiler());
    DriverContext driverContext = createDriverContext(defaultMemoryLimit, TEST_SNAPSHOT_SESSION);
    WindowOperator windowOperator = (WindowOperator) operatorFactory.createOperator(driverContext);
    // Step 1: add the first 2 pages
    for (Page page : input1) {
        windowOperator.addInput(page);
        windowOperator.getOutput();
    }
    // Step 2: spilling happens here
    getFutureValue(windowOperator.startMemoryRevoke());
    windowOperator.finishMemoryRevoke();
    // Step 3: add a marker page to trigger 'capture1'
    MarkerPage marker = MarkerPage.snapshotPage(1);
    windowOperator.addInput(marker);
    windowOperator.getOutput();
    // Step 4: add another 2 pages
    for (Page page : input2) {
        windowOperator.addInput(page);
        windowOperator.getOutput();
    }
    // Step 5: assume the task is rescheduled due to a failure and everything is re-constructed
    driverContext = createDriverContext(8, TEST_SNAPSHOT_SESSION);
    operatorFactory = new WindowOperatorFactory(
            0, new PlanNodeId("test"), ImmutableList.of(VARCHAR, BIGINT, DOUBLE, BOOLEAN), Ints.asList(0, 1, 2, 3),
            ROW_NUMBER, Ints.asList(0), ImmutableList.of(), Ints.asList(1),
            ImmutableList.copyOf(new SortOrder[] {SortOrder.ASC_NULLS_LAST}),
            0, 10, new PagesIndex.TestingFactory(false), true, genericSpillerFactory, new OrderingCompiler());
    windowOperator = (WindowOperator) operatorFactory.createOperator(driverContext);
    // Step 6: restore to 'capture1'; at this point the spiller should still contain the reference to the first 2 pages
    MarkerPage resumeMarker = MarkerPage.resumePage(1);
    windowOperator.addInput(resumeMarker);
    windowOperator.getOutput();
    // Step 7: continue to add another 2 pages
    for (Page page : input2) {
        windowOperator.addInput(page);
        windowOperator.getOutput();
    }
    windowOperator.finish();
    // Compare the results
    MaterializedResult expected = resultBuilder(driverContext.getSession(), VARCHAR, BIGINT, DOUBLE, BOOLEAN, BIGINT)
            .row("a", 2L, 0.3, false, 1L).row("a", 4L, 0.2, true, 2L).row("a", 6L, 0.1, true, 3L)
            .row("b", -1L, -0.1, true, 1L).row("b", 5L, 0.4, false, 2L)
            .row("c", -1L, -0.1, true, 1L).row("c", 4L, 0.2, true, 2L)
            .row("d", 2L, 0.3, false, 1L).row("d", 5L, 0.4, false, 2L)
            .build();
    Page p = windowOperator.getOutput();
    while (p == null) {
        p = windowOperator.getOutput();
    }
    outputPages.add(p);
    MaterializedResult actual = toMaterializedResult(driverContext.getSession(), expected.getTypes(), outputPages.build());
    Assert.assertEquals(actual, expected);
}
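The expected result above is ROW_NUMBER computed per partition: rows are grouped by the VARCHAR channel, ordered by the BIGINT channel ascending, and numbered from 1 within each group, regardless of whether pages were spilled, captured, or restored along the way. A rough stand-alone sketch of that computation over plain Java values (illustrative only, not hetu-core code):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class RowNumberSketch {
    public static void main(String[] args) {
        // (partition key, order key) pairs taken from input1 and input2 above
        String[] partitionKeys = {"b", "a", "a", "b", "a", "c", "d", "c", "d"};
        long[] orderKeys = {-1, 2, 4, 5, 6, -1, 2, 4, 5};
        // Group by partition key
        Map<String, List<Long>> partitions = new TreeMap<>();
        for (int i = 0; i < partitionKeys.length; i++) {
            partitions.computeIfAbsent(partitionKeys[i], key -> new ArrayList<>()).add(orderKeys[i]);
        }
        // Order within each partition ascending and assign ROW_NUMBER starting at 1
        for (Map.Entry<String, List<Long>> partition : partitions.entrySet()) {
            Collections.sort(partition.getValue());
            int rowNumber = 1;
            for (Long orderKey : partition.getValue()) {
                System.out.println(partition.getKey() + " " + orderKey + " -> " + rowNumber++);
            }
        }
    }
}

Running the sketch reproduces the row numbers in the expected MaterializedResult: a gets 1..3, b, c and d each get 1..2.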
Use of io.prestosql.spi.block.SortOrder in project hetu-core by openlookeng.
The class TestWindowOperator, method testRowNumberSnapshot:
@Test(dataProvider = "spillEnabledSnapshot")
public void testRowNumberSnapshot(boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimit) {
    List<Page> input = rowPagesBuilder(BIGINT, DOUBLE)
            .row(2L, 0.3).row(4L, 0.2).row(6L, 0.1).pageBreak()
            .row(-1L, -0.1).row(5L, 0.4).build();
    WindowOperatorFactory operatorFactory = createFactoryUnbounded(
            ImmutableList.of(BIGINT, DOUBLE), Ints.asList(1, 0), ROW_NUMBER, Ints.asList(), Ints.asList(0),
            ImmutableList.copyOf(new SortOrder[] {SortOrder.ASC_NULLS_LAST}), spillEnabled);
    DriverContext driverContext = createDriverContext(memoryLimit);
    MaterializedResult expected = resultBuilder(driverContext.getSession(), DOUBLE, BIGINT, BIGINT)
            .row(-0.1, -1L, 1L).row(0.3, 2L, 2L).row(0.2, 4L, 3L).row(0.4, 5L, 4L).row(0.1, 6L, 5L).build();
    assertOperatorEqualsWithRestoreToNewOperator(
            operatorFactory, driverContext, () -> createDriverContext(memoryLimit), input, expected, revokeMemoryWhenAddingPages);
}
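Both row-number tests take their three parameters (spillEnabled, revokeMemoryWhenAddingPages, memoryLimit) from TestNG data providers that are not shown in this excerpt. A hedged sketch of what such a provider might look like; the combinations below are assumptions, not the actual values in TestWindowOperator:

import org.testng.annotations.DataProvider;

public class SpillParametersSketch {
    // Hypothetical provider; the real "spillEnabled"/"spillEnabledSnapshot" providers may differ.
    @DataProvider(name = "spillEnabledSnapshot")
    public static Object[][] spillEnabledSnapshot() {
        return new Object[][] {
                {false, false, 0L},               // spill disabled
                {true, false, 0L},                // spill enabled, revoked explicitly by the harness
                {true, true, 0L},                 // spill enabled, revoked while adding pages
                {true, false, 8L * 1024 * 1024},  // spill enabled with a memory limit that forces spilling
        };
    }
}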
Use of io.prestosql.spi.block.SortOrder in project hetu-core by openlookeng.
The class QueryPlanner, method planUpdateRowAsInsert:
public UpdateDeleteRelationPlan planUpdateRowAsInsert(Update node) {
    Table table = node.getTable();
    RelationType descriptor = analysis.getOutputDescriptor(table);
    TableHandle handle = analysis.getTableHandle(table);
    ColumnHandle rowIdHandle = analysis.getRowIdHandle(table);
    ColumnMetadata rowIdColumnMetadata = metadata.getColumnMetadata(session, handle, rowIdHandle);
    // add table columns
    ImmutableList.Builder<Symbol> outputSymbols = ImmutableList.builder();
    ImmutableMap.Builder<Symbol, ColumnHandle> columnsBuilder = ImmutableMap.builder();
    ImmutableList.Builder<Field> fields = ImmutableList.builder();
    for (Field field : descriptor.getAllFields()) {
        Symbol symbol = planSymbolAllocator.newSymbol(field);
        outputSymbols.add(symbol);
        columnsBuilder.put(symbol, analysis.getColumn(field));
        fields.add(field);
    }
    // create table scan
    ImmutableMap<Symbol, ColumnHandle> columns = columnsBuilder.build();
    PlanNode tableScan = TableScanNode.newInstance(
            idAllocator.getNextId(), handle, outputSymbols.build(), columns,
            ReuseExchangeOperator.STRATEGY.REUSE_STRATEGY_DEFAULT, new UUID(0, 0), 0, true);
    Scope scope = Scope.builder().withRelationType(RelationId.anonymous(), new RelationType(fields.build())).build();
    RelationPlan relationPlan = new RelationPlan(tableScan, scope, outputSymbols.build());
    TranslationMap translations = new TranslationMap(relationPlan, analysis, lambdaDeclarationToSymbolMap);
    translations.setFieldMappings(relationPlan.getFieldMappings());
    PlanBuilder builder = new PlanBuilder(translations, relationPlan.getRoot());
    Optional<RowExpression> predicate = Optional.empty();
    if (node.getWhere().isPresent()) {
        builder = filter(builder, node.getWhere().get(), node);
        if (builder.getRoot() instanceof FilterNode) {
            predicate = Optional.of(((FilterNode) builder.getRoot()).getPredicate());
        }
    }
    List<AssignmentItem> assignmentItems = node.getAssignmentItems();
    Analysis.Update update = analysis.getUpdate().get();
    Assignments.Builder assignments = Assignments.builder();
    TableMetadata tableMetadata = metadata.getTableMetadata(session, update.getTarget());
    Symbol orderBySymbol = null;
    for (Map.Entry<Symbol, ColumnHandle> entry : columns.entrySet()) {
        ColumnMetadata column;
        ColumnHandle columnHandle = entry.getValue();
        Symbol input = entry.getKey();
        if (columnHandle.getColumnName().equals(rowIdHandle.getColumnName())) {
            column = rowIdColumnMetadata;
        } else {
            column = tableMetadata.getColumn(columnHandle.getColumnName());
        }
        if (column != rowIdColumnMetadata && column.isHidden()) {
            continue;
        }
        Symbol output = planSymbolAllocator.newSymbol(column.getName(), column.getType());
        Type tableType = column.getType();
        Type queryType = planSymbolAllocator.getTypes().get(input);
        List<AssignmentItem> assignment = assignmentItems.stream()
                .filter(item -> item.getName().equals(QualifiedName.of(column.getName())))
                .collect(Collectors.toList());
        if (!assignment.isEmpty()) {
            Expression expression = assignment.get(0).getValue();
            Expression cast;
            if (expression instanceof Identifier) {
                // assigning by column reference
                Optional<Symbol> first = columns.entrySet().stream()
                        .filter(e -> e.getValue().getColumnName().equals(((Identifier) expression).getValue()))
                        .map(Entry::getKey)
                        .findFirst();
                Symbol source = first.orElseThrow(() -> new IllegalArgumentException("Unable to find column " + ((Identifier) expression).getValue()));
                cast = new Cast(toSymbolReference(source), tableType.getTypeSignature().toString());
            } else {
                cast = new Cast(expression, tableType.getTypeSignature().toString());
            }
            assignments.put(output, castToRowExpression(cast));
        } else if (queryType.equals(tableType) || typeCoercion.isTypeOnlyCoercion(queryType, tableType)) {
            assignments.put(output, castToRowExpression(toSymbolReference(input)));
        } else {
            Expression cast = new Cast(toSymbolReference(input), tableType.getTypeSignature().toString());
            assignments.put(output, castToRowExpression(cast));
        }
        if (column == rowIdColumnMetadata) {
            orderBySymbol = output;
        }
    }
    ProjectNode projectNode = new ProjectNode(idAllocator.getNextId(), builder.getRoot(), assignments.build());
    PlanBuilder planBuilder = new PlanBuilder(translations, projectNode);
    SortOrder sortOrder = SortOrder.ASC_NULLS_LAST;
    Symbol sortSymbol = orderBySymbol;
    Map<Symbol, SortOrder> sortOrderMap = ImmutableMap.<Symbol, SortOrder>builder().put(sortSymbol, sortOrder).build();
    OrderingScheme orderingScheme = new OrderingScheme(ImmutableList.of(sortSymbol), sortOrderMap);
    builder = sort(planBuilder, Optional.of(orderingScheme));
    ImmutableList.Builder<Field> projectFields = ImmutableList.builder();
    projectFields.addAll(fields.build().stream().filter(x -> !x.isHidden()).collect(toImmutableList()));
    scope = Scope.builder().withRelationType(RelationId.anonymous(), new RelationType(projectFields.build())).build();
    RelationPlan plan = new RelationPlan(builder.getRoot(), scope, projectNode.getOutputSymbols());
    List<String> visibleTableColumnNames = tableMetadata.getColumns().stream()
            .filter(c -> !c.isHidden())
            .map(ColumnMetadata::getName)
            .collect(Collectors.toList());
    visibleTableColumnNames.add(rowIdColumnMetadata.getName());
    return new UpdateDeleteRelationPlan(plan, visibleTableColumnNames, columns, predicate);
}
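The projection built in this method follows a simple rule: for each table column, use the UPDATE assignment if one exists (cast to the column's type), otherwise carry the scanned value through, and remember the row-id column so the plan can later be sorted on it with SortOrder.ASC_NULLS_LAST. A rough stand-alone sketch of that selection rule, with strings standing in for symbols and expressions (illustrative only, column and statement names are made up, not the planner's types):

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

public class UpdateAsInsertSketch {
    public static void main(String[] args) {
        // Table columns in declaration order, including a synthetic row-id column (hypothetical names)
        Map<String, String> tableColumns = new LinkedHashMap<>();
        tableColumns.put("id", "bigint");
        tableColumns.put("name", "varchar");
        tableColumns.put("$rowId", "bigint");
        // Assignments from a statement like: UPDATE t SET name = upper(name)
        Map<String, String> assignments = Collections.singletonMap("name", "upper(name)");

        Map<String, String> projection = new LinkedHashMap<>();
        String orderBySymbol = null;
        for (String column : tableColumns.keySet()) {
            // Use the assigned expression when the UPDATE sets this column, otherwise carry the
            // scanned value through; a real planner also adds a CAST when the types differ.
            projection.put(column, assignments.getOrDefault(column, column));
            if (column.equals("$rowId")) {
                orderBySymbol = column;  // the plan is later sorted on this symbol, ASC_NULLS_LAST
            }
        }
        System.out.println("project " + projection + ", order by " + orderBySymbol);
    }
}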
Use of io.prestosql.spi.block.SortOrder in project hetu-core by openlookeng.
The class QueryPlanner, method orderingScheme:
private Optional<OrderingScheme> orderingScheme(PlanBuilder subPlan, Optional<OrderBy> orderBy, List<Expression> orderByExpressions) {
    if (!orderBy.isPresent() || (isSkipRedundantSort(session) && analysis.isOrderByRedundant(orderBy.get()))) {
        return Optional.empty();
    }
    Iterator<SortItem> sortItems = orderBy.get().getSortItems().iterator();
    ImmutableList.Builder<Symbol> orderBySymbols = ImmutableList.builder();
    Map<Symbol, SortOrder> orderings = new HashMap<>();
    for (Expression fieldOrExpression : orderByExpressions) {
        Symbol symbol = subPlan.translate(fieldOrExpression);
        SortItem sortItem = sortItems.next();
        if (!orderings.containsKey(symbol)) {
            orderBySymbols.add(symbol);
            orderings.put(symbol, sortItemToSortOrder(sortItem));
        }
    }
    return Optional.of(new OrderingScheme(orderBySymbols.build(), orderings));
}
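Note that the loop deduplicates symbols: when two ORDER BY items translate to the same symbol, only the first item's sort order is kept, since a later ordering on the same key cannot affect the result. A tiny stand-alone illustration with strings standing in for symbols:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class OrderingDedupSketch {
    enum Order { ASC_NULLS_LAST, DESC_NULLS_LAST }

    public static void main(String[] args) {
        // ORDER BY x ASC, y DESC, x DESC -- the second ordering on x is redundant
        String[] symbols = {"x", "y", "x"};
        Order[] orders = {Order.ASC_NULLS_LAST, Order.DESC_NULLS_LAST, Order.DESC_NULLS_LAST};

        List<String> orderBySymbols = new ArrayList<>();
        Map<String, Order> orderings = new HashMap<>();
        for (int i = 0; i < symbols.length; i++) {
            // Keep only the first ordering seen for each symbol, as in orderingScheme() above
            if (!orderings.containsKey(symbols[i])) {
                orderBySymbols.add(symbols[i]);
                orderings.put(symbols[i], orders[i]);
            }
        }
        // Prints [x, y]; x keeps ASC_NULLS_LAST because its first occurrence wins
        System.out.println(orderBySymbols + " " + orderings);
    }
}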