Use of com.facebook.presto.sql.planner.plan.PlanNodeId in project presto by prestodb.
From class TestTopNRowNumberOperator, method testTopNRowNumberUnPartitioned:
@Test
public void testTopNRowNumberUnPartitioned() throws Exception {
List<Page> input = rowPagesBuilder(BIGINT, DOUBLE)
        .row(1L, 0.3).row(2L, 0.2).row(3L, 0.1).row(3L, 0.91)
        .pageBreak()
        .row(1L, 0.4)
        .pageBreak()
        .row(1L, 0.5).row(1L, 0.6).row(2L, 0.7).row(2L, 0.8)
        .pageBreak()
        .row(2L, 0.9)
        .build();
TopNRowNumberOperatorFactory operatorFactory = new TopNRowNumberOperatorFactory(
        0, new PlanNodeId("test"),
        ImmutableList.of(BIGINT, DOUBLE), Ints.asList(1, 0),
        Ints.asList(), ImmutableList.of(),
        Ints.asList(1), ImmutableList.of(SortOrder.ASC_NULLS_LAST),
        3, false, Optional.empty(), 10, joinCompiler);
MaterializedResult expected = resultBuilder(driverContext.getSession(), DOUBLE, BIGINT, BIGINT)
        .row(0.1, 3L, 1L)
        .row(0.2, 2L, 2L)
        .row(0.3, 1L, 3L)
        .build();
assertOperatorEquals(operatorFactory, driverContext, input, expected);
}
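Stripped of the operator machinery, the unpartitioned case reduces to a global sort-and-limit with a running row number: sort every row by the DOUBLE channel ascending, keep the first three, number them 1..3. A minimal plain-JDK sketch of that semantics (class and variable names are illustrative only, not Presto APIs):

import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;

public class TopNRowNumberSketch
{
    public static void main(String[] args)
    {
        // (key, sortValue) pairs matching the pages built above; page breaks are irrelevant here
        List<double[]> rows = List.of(
                new double[] {1, 0.3}, new double[] {2, 0.2}, new double[] {3, 0.1}, new double[] {3, 0.91},
                new double[] {1, 0.4}, new double[] {1, 0.5}, new double[] {1, 0.6},
                new double[] {2, 0.7}, new double[] {2, 0.8}, new double[] {2, 0.9});
        // global sort by the sort value, keep the top 3
        List<double[]> top3 = rows.stream()
                .sorted(Comparator.comparingDouble(r -> r[1]))
                .limit(3)
                .collect(Collectors.toList());
        for (int i = 0; i < top3.size(); i++) {
            // prints (0.1, 3, 1), (0.2, 2, 2), (0.3, 1, 3) -- the expected MaterializedResult above
            System.out.printf("(%s, %d, %d)%n", top3.get(i)[1], (long) top3.get(i)[0], i + 1);
        }
    }
}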
Use of com.facebook.presto.sql.planner.plan.PlanNodeId in project presto by prestodb.
From class TestUnnestOperator, method testUnnestWithOrdinality:
@Test
public void testUnnestWithOrdinality() throws Exception {
MetadataManager metadata = createTestMetadataManager();
Type arrayType = metadata.getType(parseTypeSignature("array(bigint)"));
Type mapType = metadata.getType(parseTypeSignature("map(bigint,bigint)"));
List<Page> input = rowPagesBuilder(BIGINT, arrayType, mapType)
        .row(1L, arrayBlockOf(BIGINT, 2, 3), mapBlockOf(BIGINT, BIGINT, ImmutableMap.of(4, 5)))
        .row(2L, arrayBlockOf(BIGINT, 99), null)
        .row(3L, null, null)
        .pageBreak()
        .row(6L, arrayBlockOf(BIGINT, 7, 8), mapBlockOf(BIGINT, BIGINT, ImmutableMap.of(9, 10, 11, 12)))
        .build();
OperatorFactory operatorFactory = new UnnestOperator.UnnestOperatorFactory(
        0, new PlanNodeId("test"),
        ImmutableList.of(0), ImmutableList.of(BIGINT),
        ImmutableList.of(1, 2), ImmutableList.of(arrayType, mapType),
        true);
MaterializedResult expected = resultBuilder(driverContext.getSession(), BIGINT, BIGINT, BIGINT, BIGINT, BIGINT)
        .row(1L, 2L, 4L, 5L, 1L)
        .row(1L, 3L, null, null, 2L)
        .row(2L, 99L, null, null, 1L)
        .row(6L, 7L, 9L, 10L, 1L)
        .row(6L, 8L, 11L, 12L, 2L)
        .build();
assertOperatorEquals(operatorFactory, driverContext, input, expected);
}
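The trailing true passed to UnnestOperatorFactory enables ordinality, which appends a 1-based position column. Judging from the expected rows, array elements and map entries are aligned positionally and the shorter side is padded with nulls. A rough plain-JDK sketch of that per-row expansion (the helper and its names are made up for illustration, not Presto code):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class UnnestWithOrdinalitySketch
{
    static List<Object[]> unnest(long id, List<Long> array, Map<Long, Long> map)
    {
        List<Long> elements = array == null ? List.of() : array;
        List<Map.Entry<Long, Long>> entries = map == null
                ? List.of()
                : new ArrayList<Map.Entry<Long, Long>>(map.entrySet());
        List<Object[]> output = new ArrayList<>();
        for (int i = 0; i < Math.max(elements.size(), entries.size()); i++) {
            // align positions; whichever collection runs out first contributes nulls
            Long element = i < elements.size() ? elements.get(i) : null;
            Long key = i < entries.size() ? entries.get(i).getKey() : null;
            Long value = i < entries.size() ? entries.get(i).getValue() : null;
            output.add(new Object[] {id, element, key, value, (long) (i + 1)});
        }
        return output;
    }

    public static void main(String[] args)
    {
        // mirrors the first input row above: prints [1, 2, 4, 5, 1] then [1, 3, null, null, 2]
        Map<Long, Long> map = new LinkedHashMap<>();
        map.put(4L, 5L);
        for (Object[] row : unnest(1L, List.of(2L, 3L), map)) {
            System.out.println(java.util.Arrays.toString(row));
        }
    }
}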
Use of com.facebook.presto.sql.planner.plan.PlanNodeId in project presto by prestodb.
From class TestUnnestOperator, method testUnnest:
@Test
public void testUnnest() throws Exception {
MetadataManager metadata = createTestMetadataManager();
Type arrayType = metadata.getType(parseTypeSignature("array(bigint)"));
Type mapType = metadata.getType(parseTypeSignature("map(bigint,bigint)"));
List<Page> input = rowPagesBuilder(BIGINT, arrayType, mapType)
        .row(1L, arrayBlockOf(BIGINT, 2, 3), mapBlockOf(BIGINT, BIGINT, ImmutableMap.of(4, 5)))
        .row(2L, arrayBlockOf(BIGINT, 99), null)
        .row(3L, null, null)
        .pageBreak()
        .row(6L, arrayBlockOf(BIGINT, 7, 8), mapBlockOf(BIGINT, BIGINT, ImmutableMap.of(9, 10, 11, 12)))
        .build();
OperatorFactory operatorFactory = new UnnestOperator.UnnestOperatorFactory(
        0, new PlanNodeId("test"),
        ImmutableList.of(0), ImmutableList.of(BIGINT),
        ImmutableList.of(1, 2), ImmutableList.of(arrayType, mapType),
        false);
MaterializedResult expected = resultBuilder(driverContext.getSession(), BIGINT, BIGINT, BIGINT, BIGINT)
        .row(1L, 2L, 4L, 5L)
        .row(1L, 3L, null, null)
        .row(2L, 99L, null, null)
        .row(6L, 7L, 9L, 10L)
        .row(6L, 8L, 11L, 12L)
        .build();
assertOperatorEquals(operatorFactory, driverContext, input, expected);
}
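This variant passes false for ordinality, so no position column is produced; the interesting part of the fixture is the null handling. From the expected result, a null collection contributes null output columns, and a row whose array and map are both null produces no output rows at all. A hedged plain-JDK sketch of just that behaviour (names are illustrative, not Presto code; map entries are modelled as long[] pairs for brevity):

import java.util.ArrayList;
import java.util.List;

public class UnnestNullHandlingSketch
{
    static List<Object[]> unnest(long id, List<Long> array, List<long[]> mapEntries)
    {
        List<Long> elements = array == null ? List.of() : array;
        List<long[]> entries = mapEntries == null ? List.of() : mapEntries;
        List<Object[]> output = new ArrayList<>();
        for (int i = 0; i < Math.max(elements.size(), entries.size()); i++) {
            Long element = i < elements.size() ? elements.get(i) : null;
            Long key = i < entries.size() ? entries.get(i)[0] : null;
            Long value = i < entries.size() ? entries.get(i)[1] : null;
            output.add(new Object[] {id, element, key, value});
        }
        return output;
    }

    public static void main(String[] args)
    {
        System.out.println(unnest(2L, List.of(99L), null).size()); // 1 row: (2, 99, null, null)
        System.out.println(unnest(3L, null, null).size());         // 0 rows, as in the expected result
    }
}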
Use of com.facebook.presto.sql.planner.plan.PlanNodeId in project presto by prestodb.
From class TestUnnestOperator, method testUnnestWithArray:
@Test
public void testUnnestWithArray() throws Exception {
MetadataManager metadata = createTestMetadataManager();
Type arrayType = metadata.getType(parseTypeSignature("array(array(bigint))"));
Type mapType = metadata.getType(parseTypeSignature("map(array(bigint),array(bigint))"));
List<Page> input = rowPagesBuilder(BIGINT, arrayType, mapType)
        .row(1L,
                arrayBlockOf(new ArrayType(BIGINT), ImmutableList.of(2, 4), ImmutableList.of(3, 6)),
                mapBlockOf(new ArrayType(BIGINT), new ArrayType(BIGINT), ImmutableMap.of(ImmutableList.of(4, 8), ImmutableList.of(5, 10))))
        .row(2L, arrayBlockOf(new ArrayType(BIGINT), ImmutableList.of(99, 198)), null)
        .row(3L, null, null)
        .pageBreak()
        .row(6L,
                arrayBlockOf(new ArrayType(BIGINT), ImmutableList.of(7, 14), ImmutableList.of(8, 16)),
                mapBlockOf(new ArrayType(BIGINT), new ArrayType(BIGINT), ImmutableMap.of(ImmutableList.of(9, 18), ImmutableList.of(10, 20), ImmutableList.of(11, 22), ImmutableList.of(12, 24))))
        .build();
OperatorFactory operatorFactory = new UnnestOperator.UnnestOperatorFactory(
        0, new PlanNodeId("test"),
        ImmutableList.of(0), ImmutableList.of(BIGINT),
        ImmutableList.of(1, 2), ImmutableList.of(arrayType, mapType),
        false);
MaterializedResult expected = resultBuilder(driverContext.getSession(), BIGINT, new ArrayType(BIGINT), new ArrayType(BIGINT), new ArrayType(BIGINT))
        .row(1L, ImmutableList.of(2L, 4L), ImmutableList.of(4L, 8L), ImmutableList.of(5L, 10L))
        .row(1L, ImmutableList.of(3L, 6L), null, null)
        .row(2L, ImmutableList.of(99L, 198L), null, null)
        .row(6L, ImmutableList.of(7L, 14L), ImmutableList.of(9L, 18L), ImmutableList.of(10L, 20L))
        .row(6L, ImmutableList.of(8L, 16L), ImmutableList.of(11L, 22L), ImmutableList.of(12L, 24L))
        .build();
assertOperatorEquals(operatorFactory, driverContext, input, expected);
}
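With array(array(bigint)) and map(array(bigint),array(bigint)) columns, unnesting removes exactly one level of nesting, which is why the expected rows above still contain ImmutableList values rather than scalars. A small illustrative sketch of that point (plain JDK, not Presto):

import java.util.List;

public class UnnestNestedArraySketch
{
    public static void main(String[] args)
    {
        long id = 1L;
        // corresponds to the array(array(bigint)) column of the first input row above
        List<List<Long>> arrayOfArrays = List.of(List.of(2L, 4L), List.of(3L, 6L));
        for (List<Long> element : arrayOfArrays) {
            // prints (1, [2, 4]) then (1, [3, 6]); the map(array, array) column unnests the same way,
            // one key/value pair per output row, each still a list
            System.out.println("(" + id + ", " + element + ")");
        }
    }
}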
Use of com.facebook.presto.sql.planner.plan.PlanNodeId in project presto by prestodb.
From class PlanPrinter, method getPlanNodeStats:
private static List<PlanNodeStats> getPlanNodeStats(TaskStats taskStats) {
// Best effort to reconstruct the plan nodes from operators.
// Because stats are collected separately from query execution,
// it's possible that some or all of them are missing or out of date.
// For example, a LIMIT clause can cause a query to finish before stats
// are collected from the leaf stages.
Map<PlanNodeId, Long> planNodeInputPositions = new HashMap<>();
Map<PlanNodeId, Long> planNodeInputBytes = new HashMap<>();
Map<PlanNodeId, Long> planNodeOutputPositions = new HashMap<>();
Map<PlanNodeId, Long> planNodeOutputBytes = new HashMap<>();
Map<PlanNodeId, Long> planNodeWallMillis = new HashMap<>();
Map<PlanNodeId, Map<String, OperatorInputStats>> operatorInputStats = new HashMap<>();
Map<PlanNodeId, Map<String, OperatorHashCollisionsStats>> operatorHashCollisionsStats = new HashMap<>();
for (PipelineStats pipelineStats : taskStats.getPipelines()) {
// Because stats collection is eventually consistent, these could be empty
if (pipelineStats.getOperatorSummaries().isEmpty()) {
continue;
}
Set<PlanNodeId> processedNodes = new HashSet<>();
PlanNodeId inputPlanNode = pipelineStats.getOperatorSummaries().iterator().next().getPlanNodeId();
PlanNodeId outputPlanNode = getLast(pipelineStats.getOperatorSummaries()).getPlanNodeId();
// Gather input statistics
for (OperatorStats operatorStats : pipelineStats.getOperatorSummaries()) {
PlanNodeId planNodeId = operatorStats.getPlanNodeId();
long wall = operatorStats.getAddInputWall().toMillis() + operatorStats.getGetOutputWall().toMillis() + operatorStats.getFinishWall().toMillis();
planNodeWallMillis.merge(planNodeId, wall, Long::sum);
// A pipeline such as the hash build before a join might link to other "internal" pipelines that provide the actual input for this plan node
if (operatorStats.getPlanNodeId().equals(inputPlanNode) && !pipelineStats.isInputPipeline()) {
continue;
}
if (processedNodes.contains(planNodeId)) {
continue;
}
operatorInputStats.merge(planNodeId, ImmutableMap.of(operatorStats.getOperatorType(), new OperatorInputStats(operatorStats.getTotalDrivers(), operatorStats.getInputPositions(), operatorStats.getSumSquaredInputPositions())), PlanPrinter::mergeOperatorInputStatsMaps);
if (operatorStats.getInfo() instanceof HashCollisionsInfo) {
HashCollisionsInfo hashCollisionsInfo = (HashCollisionsInfo) operatorStats.getInfo();
operatorHashCollisionsStats.merge(planNodeId, ImmutableMap.of(operatorStats.getOperatorType(), new OperatorHashCollisionsStats(hashCollisionsInfo.getWeightedHashCollisions(), hashCollisionsInfo.getWeightedSumSquaredHashCollisions(), hashCollisionsInfo.getWeightedExpectedHashCollisions())), PlanPrinter::mergeOperatorHashCollisionsStatsMaps);
}
planNodeInputPositions.merge(planNodeId, operatorStats.getInputPositions(), Long::sum);
planNodeInputBytes.merge(planNodeId, operatorStats.getInputDataSize().toBytes(), Long::sum);
processedNodes.add(planNodeId);
}
// Gather output statistics
processedNodes.clear();
for (OperatorStats operatorStats : reverse(pipelineStats.getOperatorSummaries())) {
PlanNodeId planNodeId = operatorStats.getPlanNodeId();
// An "internal" pipeline like a hash build, links to another pipeline which is the actual output for this plan node
if (operatorStats.getPlanNodeId().equals(outputPlanNode) && !pipelineStats.isOutputPipeline()) {
continue;
}
if (processedNodes.contains(planNodeId)) {
continue;
}
planNodeOutputPositions.merge(planNodeId, operatorStats.getOutputPositions(), Long::sum);
planNodeOutputBytes.merge(planNodeId, operatorStats.getOutputDataSize().toBytes(), Long::sum);
processedNodes.add(planNodeId);
}
}
List<PlanNodeStats> stats = new ArrayList<>();
for (Map.Entry<PlanNodeId, Long> entry : planNodeWallMillis.entrySet()) {
PlanNodeId planNodeId = entry.getKey();
stats.add(new PlanNodeStats(
        planNodeId,
        new Duration(planNodeWallMillis.get(planNodeId), MILLISECONDS),
        planNodeInputPositions.get(planNodeId),
        succinctDataSize(planNodeInputBytes.get(planNodeId), BYTE),
        // Some nodes may have been observed before producing any output
        // and therefore only have wall time, but no output stats
        planNodeOutputPositions.getOrDefault(planNodeId, 0L),
        succinctDataSize(planNodeOutputBytes.getOrDefault(planNodeId, 0L), BYTE),
        operatorInputStats.get(planNodeId),
        // Only some operators emit hash collisions statistics
        operatorHashCollisionsStats.getOrDefault(planNodeId, emptyMap())));
}
return stats;
}
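The method leans heavily on Map.merge with Long::sum to fold per-operator figures into per-plan-node totals keyed by PlanNodeId. A minimal standalone sketch of that accumulation pattern (the keys and numbers below are invented for illustration, not real Presto stats):

import java.util.HashMap;
import java.util.Map;

public class MergeStatsSketch
{
    public static void main(String[] args)
    {
        Map<String, Long> planNodeInputPositions = new HashMap<>();
        // merge inserts the value the first time a key is seen, and applies Long::sum on every repeat
        planNodeInputPositions.merge("node-0", 100L, Long::sum); // node-0 -> 100
        planNodeInputPositions.merge("node-0", 250L, Long::sum); // node-0 -> 350
        planNodeInputPositions.merge("node-1", 40L, Long::sum);  // node-1 -> 40
        System.out.println(planNodeInputPositions);              // contains node-0=350 and node-1=40
    }
}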