Use of io.prestosql.spi.statistics.ColumnStatisticMetadata in project hetu-core by openlookeng.
Class TestStatisticAggregationsDescriptor, method testColumnStatisticMetadataKeySerializationRoundTrip:
@Test
public void testColumnStatisticMetadataKeySerializationRoundTrip() {
    for (String column : COLUMNS) {
        for (ColumnStatisticType type : ColumnStatisticType.values()) {
            ColumnStatisticMetadata expected = new ColumnStatisticMetadata(column, type);
            assertEquals(deserialize(serialize(expected)), expected);
        }
    }
}
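The serialize/deserialize helpers used by this round trip are not shown above. As a minimal sketch of the same idea, assuming only that ColumnStatisticMetadata is JSON-serializable through its annotated constructor and that io.airlift.json.JsonCodec is on the test classpath (the class and helper names below are illustrative, not the ones from the actual test):

import io.airlift.json.JsonCodec;
import io.prestosql.spi.statistics.ColumnStatisticMetadata;

import static io.prestosql.spi.statistics.ColumnStatisticType.MAX_VALUE;
import static org.testng.Assert.assertEquals;

public class ColumnStatisticMetadataRoundTripSketch {
    private static final JsonCodec<ColumnStatisticMetadata> CODEC = JsonCodec.jsonCodec(ColumnStatisticMetadata.class);

    public static void main(String[] args) {
        ColumnStatisticMetadata expected = new ColumnStatisticMetadata("order_key", MAX_VALUE);
        // Serialize to JSON and back; equals() compares (columnName, statisticType).
        ColumnStatisticMetadata actual = CODEC.fromJson(CODEC.toJson(expected));
        assertEquals(actual, expected);
    }
}

The point of the round trip is that a ColumnStatisticMetadata key survives serialization unchanged, which is what lets it be used as a map key inside StatisticAggregationsDescriptor.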
Use of io.prestosql.spi.statistics.ColumnStatisticMetadata in project hetu-core by openlookeng.
Class TestTableFinishOperator, method testStatisticsAggregationSnapshot:
@Test
public void testStatisticsAggregationSnapshot() throws Exception {
    TestTableFinisher tableFinisher = new TestTableFinisher();
    ColumnStatisticMetadata statisticMetadata = new ColumnStatisticMetadata("column", MAX_VALUE);
    StatisticAggregationsDescriptor<Integer> descriptor = new StatisticAggregationsDescriptor<>(
            ImmutableMap.of(), ImmutableMap.of(), ImmutableMap.of(statisticMetadata, 0));
    Session session = testSessionBuilder()
            .setSystemProperty("statistics_cpu_timer_enabled", "true")
            .build();
    TableFinishOperatorFactory operatorFactory = new TableFinishOperatorFactory(
            0,
            new PlanNodeId("node"),
            tableFinisher,
            new AggregationOperator.AggregationOperatorFactory(
                    1,
                    new PlanNodeId("test"),
                    AggregationNode.Step.SINGLE,
                    ImmutableList.of(LONG_MAX.bind(ImmutableList.of(2), Optional.empty())),
                    true),
            descriptor,
            session);
    DriverContext driverContext = createTaskContext(scheduledExecutor, scheduledExecutor, session)
            .addPipelineContext(0, true, true, false)
            .addDriverContext();
    TableFinishOperator operator = (TableFinishOperator) operatorFactory.createOperator(driverContext);
    List<Type> inputTypes = ImmutableList.of(BIGINT, VARBINARY, BIGINT);
    operator.addInput(rowPagesBuilder(inputTypes).row(4, null, null).build().get(0));
    operator.addInput(rowPagesBuilder(inputTypes).row(5, null, null).build().get(0));
    operator.addInput(rowPagesBuilder(inputTypes).row(null, new byte[] { 1 }, null).build().get(0));
    // Capture the operator state after the first three pages
    Object snapshot = operator.capture(operator.getOperatorContext().getDriverContext().getSerde());
    Map<String, Object> snapshotMapping = (Map<String, Object>) SnapshotTestUtil.toFullSnapshotMapping(snapshot);
    assertEquals(snapshotMapping, createExpectedMapping());
    operator.addInput(rowPagesBuilder(inputTypes).row(null, new byte[] { 2 }, null).build().get(0));
    operator.addInput(rowPagesBuilder(inputTypes).row(null, null, 6).build().get(0));
    operator.addInput(rowPagesBuilder(inputTypes).row(null, null, 7).build().get(0));
    // Restore to the captured state, discarding the three pages above, then replay them
    operator.restore(snapshot, operator.getOperatorContext().getDriverContext().getSerde());
    operator.addInput(rowPagesBuilder(inputTypes).row(null, new byte[] { 2 }, null).build().get(0));
    operator.addInput(rowPagesBuilder(inputTypes).row(null, null, 6).build().get(0));
    operator.addInput(rowPagesBuilder(inputTypes).row(null, null, 7).build().get(0));
    assertThat(driverContext.getSystemMemoryUsage()).isGreaterThan(0);
    assertEquals(driverContext.getMemoryUsage(), 0);
    assertTrue(operator.isBlocked().isDone());
    assertTrue(operator.needsInput());
    operator.finish();
    assertFalse(operator.isFinished());
    assertNull(operator.getOutput());
    List<Type> outputTypes = ImmutableList.of(BIGINT);
    assertPageEquals(outputTypes, operator.getOutput(), rowPagesBuilder(outputTypes).row(9).build().get(0));
    assertTrue(operator.isBlocked().isDone());
    assertFalse(operator.needsInput());
    assertTrue(operator.isFinished());
    operator.close();
    assertEquals(tableFinisher.getFragments(), ImmutableList.of(Slices.wrappedBuffer(new byte[] { 1 }), Slices.wrappedBuffer(new byte[] { 2 })));
    assertEquals(tableFinisher.getComputedStatistics().size(), 1);
    assertEquals(getOnlyElement(tableFinisher.getComputedStatistics()).getColumnStatistics().size(), 1);
    Block expectedStatisticsBlock = new LongArrayBlockBuilder(null, 1).writeLong(7).closeEntry().build();
    assertBlockEquals(BIGINT, getOnlyElement(tableFinisher.getComputedStatistics()).getColumnStatistics().get(statisticMetadata), expectedStatisticsBlock);
    TableFinishInfo tableFinishInfo = operator.getInfo();
    assertThat(tableFinishInfo.getStatisticsWallTime().getValue(NANOSECONDS)).isGreaterThan(0);
    assertThat(tableFinishInfo.getStatisticsCpuTime().getValue(NANOSECONDS)).isGreaterThan(0);
    assertEquals(driverContext.getSystemMemoryUsage(), 0);
    assertEquals(driverContext.getMemoryUsage(), 0);
}
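The capture/restore pair rolls the operator back to the state it had after the first three pages, so the pages added before restore are discarded and replayed afterwards; the final output is the same as without the rollback: 9 written rows (4 + 5) and a MAX statistic of 7. The test also relies on ColumnStatisticMetadata being a value-equal map key inside the descriptor. A minimal, self-contained sketch of that key behaviour, with the descriptor replaced by a plain map purely for illustration:

import java.util.Map;

import com.google.common.collect.ImmutableMap;
import io.prestosql.spi.statistics.ColumnStatisticMetadata;

import static io.prestosql.spi.statistics.ColumnStatisticType.MAX_VALUE;

public class ColumnStatisticChannelSketch {
    public static void main(String[] args) {
        // The descriptor maps each ColumnStatisticMetadata to the output channel
        // that carries its aggregated value (channel 0 in the test above).
        Map<ColumnStatisticMetadata, Integer> columnStatisticChannels =
                ImmutableMap.of(new ColumnStatisticMetadata("column", MAX_VALUE), 0);
        // equals()/hashCode() are defined on (columnName, statisticType),
        // so an equal key built elsewhere resolves to the same channel.
        int channel = columnStatisticChannels.get(new ColumnStatisticMetadata("column", MAX_VALUE));
        System.out.println("MAX_VALUE of 'column' is read from channel " + channel);
    }
}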
Use of io.prestosql.spi.statistics.ColumnStatisticMetadata in project hetu-core by openlookeng.
Class StatisticsAggregationPlanner, method createStatisticsAggregation:
public TableStatisticAggregation createStatisticsAggregation(TableStatisticsMetadata statisticsMetadata, Map<String, Symbol> columnToSymbolMap) {
    StatisticAggregationsDescriptor.Builder<Symbol> descriptor = StatisticAggregationsDescriptor.builder();
    List<String> groupingColumns = statisticsMetadata.getGroupingColumns();
    List<Symbol> groupingSymbols = groupingColumns.stream().map(columnToSymbolMap::get).collect(toImmutableList());
    for (int i = 0; i < groupingSymbols.size(); i++) {
        descriptor.addGrouping(groupingColumns.get(i), groupingSymbols.get(i));
    }
    ImmutableMap.Builder<Symbol, AggregationNode.Aggregation> aggregations = ImmutableMap.builder();
    StandardFunctionResolution functionResolution = new FunctionResolution(metadata.getFunctionAndTypeManager());
    for (TableStatisticType type : statisticsMetadata.getTableStatistics()) {
        if (type != ROW_COUNT) {
            throw new PrestoException(NOT_SUPPORTED, "Table-wide statistic type not supported: " + type);
        }
        // ROW_COUNT is computed as a count(*) aggregation over all rows
        AggregationNode.Aggregation aggregation = new AggregationNode.Aggregation(
                new CallExpression("count", functionResolution.countFunction(), BIGINT, ImmutableList.of(), Optional.empty()),
                ImmutableList.of(),
                false,
                Optional.empty(),
                Optional.empty(),
                Optional.empty());
        Symbol symbol = planSymbolAllocator.newSymbol("rowCount", BIGINT);
        aggregations.put(symbol, aggregation);
        descriptor.addTableStatistic(ROW_COUNT, symbol);
    }
    for (ColumnStatisticMetadata columnStatisticMetadata : statisticsMetadata.getColumnStatistics()) {
        String columnName = columnStatisticMetadata.getColumnName();
        ColumnStatisticType statisticType = columnStatisticMetadata.getStatisticType();
        Symbol inputSymbol = columnToSymbolMap.get(columnName);
        verify(inputSymbol != null, "inputSymbol is null");
        Type inputType = planSymbolAllocator.getTypes().get(inputSymbol);
        verify(inputType != null, "inputType is null for symbol: %s", inputSymbol);
        // Each (column, statistic type) pair gets its own aggregation and output symbol
        ColumnStatisticsAggregation aggregation = createColumnAggregation(statisticType, inputSymbol, inputType);
        Symbol symbol = planSymbolAllocator.newSymbol(statisticType + ":" + columnName, aggregation.getOutputType());
        aggregations.put(symbol, aggregation.getAggregation());
        descriptor.addColumnStatistic(columnStatisticMetadata, symbol);
    }
    StatisticAggregations aggregation = new StatisticAggregations(aggregations.build(), groupingSymbols);
    return new TableStatisticAggregation(aggregation, descriptor.build());
}
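createStatisticsAggregation is driven entirely by the TableStatisticsMetadata the connector returns: one ColumnStatisticMetadata per (column, statistic type) pair, optional table-wide statistics, and optional grouping columns. A minimal sketch of such a declaration, assuming the three-argument TableStatisticsMetadata constructor takes (columnStatistics, tableStatistics, groupingColumns) in that order and using illustrative column names:

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import io.prestosql.spi.statistics.ColumnStatisticMetadata;
import io.prestosql.spi.statistics.TableStatisticsMetadata;

import static io.prestosql.spi.statistics.ColumnStatisticType.MAX_VALUE;
import static io.prestosql.spi.statistics.ColumnStatisticType.NUMBER_OF_DISTINCT_VALUES;
import static io.prestosql.spi.statistics.TableStatisticType.ROW_COUNT;

public class StatisticsMetadataSketch {
    public static TableStatisticsMetadata requestedStatistics() {
        // Request two column statistics and a table-wide ROW_COUNT, with no grouping columns.
        // Column names here are hypothetical; a connector would derive them from its table metadata.
        return new TableStatisticsMetadata(
                ImmutableSet.of(
                        new ColumnStatisticMetadata("order_key", NUMBER_OF_DISTINCT_VALUES),
                        new ColumnStatisticMetadata("total_price", MAX_VALUE)),
                ImmutableSet.of(ROW_COUNT),
                ImmutableList.of());
    }
}

Given such metadata, the planner above would produce one aggregation symbol per requested statistic and record each ColumnStatisticMetadata-to-symbol mapping in the descriptor.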
Use of io.prestosql.spi.statistics.ColumnStatisticMetadata in project boostkit-bigdata by kunpengcompute.
Class HiveMetadata, method createPartitionStatistics:
private PartitionStatistics createPartitionStatistics(ConnectorSession session, Map<String, Type> columnTypes, ComputedStatistics computedStatistics) {
    Map<ColumnStatisticMetadata, Block> computedColumnStatistics = computedStatistics.getColumnStatistics();
    // The row count is mandatory; all other basic statistics are left empty here
    Block rowCountBlock = Optional.ofNullable(computedStatistics.getTableStatistics().get(ROW_COUNT))
            .orElseThrow(() -> new VerifyException("rowCount not present"));
    verify(!rowCountBlock.isNull(0), "rowCount must never be null");
    long rowCount = BIGINT.getLong(rowCountBlock, 0);
    HiveBasicStatistics rowCountOnlyBasicStatistics = new HiveBasicStatistics(OptionalLong.empty(), OptionalLong.of(rowCount), OptionalLong.empty(), OptionalLong.empty());
    return createPartitionStatistics(session, rowCountOnlyBasicStatistics, columnTypes, computedColumnStatistics);
}
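The row count arrives from the engine as a single-position BIGINT Block keyed by ROW_COUNT, and the connector reads position 0 with the statistic's type, exactly as above. A small self-contained sketch of that read, building the one-position block the same way the tests earlier in this page do:

import io.prestosql.spi.block.Block;
import io.prestosql.spi.block.LongArrayBlockBuilder;

import static io.prestosql.spi.type.BigintType.BIGINT;

public class RowCountBlockSketch {
    public static void main(String[] args) {
        // A computed statistic is delivered as a single-position block.
        Block rowCountBlock = new LongArrayBlockBuilder(null, 1).writeLong(42).closeEntry().build();
        // Guard against a null position before reading, as createPartitionStatistics does.
        long rowCount = rowCountBlock.isNull(0) ? 0L : BIGINT.getLong(rowCountBlock, 0);
        System.out.println("rowCount = " + rowCount);
    }
}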
Use of io.prestosql.spi.statistics.ColumnStatisticMetadata in project hetu-core by openlookeng.
Class TestTableFinishOperator, method testStatisticsAggregation:
@Test
public void testStatisticsAggregation() throws Exception {
    TestTableFinisher tableFinisher = new TestTableFinisher();
    ColumnStatisticMetadata statisticMetadata = new ColumnStatisticMetadata("column", MAX_VALUE);
    StatisticAggregationsDescriptor<Integer> descriptor = new StatisticAggregationsDescriptor<>(
            ImmutableMap.of(), ImmutableMap.of(), ImmutableMap.of(statisticMetadata, 0));
    Session session = testSessionBuilder()
            .setSystemProperty("statistics_cpu_timer_enabled", "true")
            .build();
    TableFinishOperatorFactory operatorFactory = new TableFinishOperatorFactory(
            0,
            new PlanNodeId("node"),
            tableFinisher,
            new AggregationOperator.AggregationOperatorFactory(
                    1,
                    new PlanNodeId("test"),
                    AggregationNode.Step.SINGLE,
                    ImmutableList.of(LONG_MAX.bind(ImmutableList.of(2), Optional.empty())),
                    true),
            descriptor,
            session);
    DriverContext driverContext = createTaskContext(scheduledExecutor, scheduledExecutor, session)
            .addPipelineContext(0, true, true, false)
            .addDriverContext();
    TableFinishOperator operator = (TableFinishOperator) operatorFactory.createOperator(driverContext);
    List<Type> inputTypes = ImmutableList.of(BIGINT, VARBINARY, BIGINT);
    operator.addInput(rowPagesBuilder(inputTypes).row(4, null, null).build().get(0));
    operator.addInput(rowPagesBuilder(inputTypes).row(5, null, null).build().get(0));
    operator.addInput(rowPagesBuilder(inputTypes).row(null, new byte[] { 1 }, null).build().get(0));
    operator.addInput(rowPagesBuilder(inputTypes).row(null, new byte[] { 2 }, null).build().get(0));
    operator.addInput(rowPagesBuilder(inputTypes).row(null, null, 6).build().get(0));
    operator.addInput(rowPagesBuilder(inputTypes).row(null, null, 7).build().get(0));
    assertThat(driverContext.getSystemMemoryUsage()).isGreaterThan(0);
    assertEquals(driverContext.getMemoryUsage(), 0);
    assertTrue(operator.isBlocked().isDone());
    assertTrue(operator.needsInput());
    operator.finish();
    assertFalse(operator.isFinished());
    assertNull(operator.getOutput());
    List<Type> outputTypes = ImmutableList.of(BIGINT);
    assertPageEquals(outputTypes, operator.getOutput(), rowPagesBuilder(outputTypes).row(9).build().get(0));
    assertTrue(operator.isBlocked().isDone());
    assertFalse(operator.needsInput());
    assertTrue(operator.isFinished());
    operator.close();
    assertEquals(tableFinisher.getFragments(), ImmutableList.of(Slices.wrappedBuffer(new byte[] { 1 }), Slices.wrappedBuffer(new byte[] { 2 })));
    assertEquals(tableFinisher.getComputedStatistics().size(), 1);
    assertEquals(getOnlyElement(tableFinisher.getComputedStatistics()).getColumnStatistics().size(), 1);
    Block expectedStatisticsBlock = new LongArrayBlockBuilder(null, 1).writeLong(7).closeEntry().build();
    assertBlockEquals(BIGINT, getOnlyElement(tableFinisher.getComputedStatistics()).getColumnStatistics().get(statisticMetadata), expectedStatisticsBlock);
    TableFinishInfo tableFinishInfo = operator.getInfo();
    assertThat(tableFinishInfo.getStatisticsWallTime().getValue(NANOSECONDS)).isGreaterThan(0);
    assertThat(tableFinishInfo.getStatisticsCpuTime().getValue(NANOSECONDS)).isGreaterThan(0);
    assertEquals(driverContext.getSystemMemoryUsage(), 0);
    assertEquals(driverContext.getMemoryUsage(), 0);
}
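Both TableFinishOperator tests use the same input page layout: channel 0 carries the number of rows written, channel 1 the serialized fragment, and channel 2 feeds the statistics aggregation (LONG_MAX is bound to channel 2 in the factory above), which is why the operator reports 9 written rows (4 + 5) and a MAX statistic of 7. A minimal sketch of building such pages, assuming the test-scope io.prestosql.RowPagesBuilder utility used throughout these tests is available:

import java.util.List;

import com.google.common.collect.ImmutableList;
import io.prestosql.spi.Page;
import io.prestosql.spi.type.Type;

import static io.prestosql.RowPagesBuilder.rowPagesBuilder;
import static io.prestosql.spi.type.BigintType.BIGINT;
import static io.prestosql.spi.type.VarbinaryType.VARBINARY;

public class TableFinishInputPageSketch {
    public static void main(String[] args) {
        // channel 0: rows written, channel 1: serialized fragment, channel 2: statistics input
        List<Type> inputTypes = ImmutableList.of(BIGINT, VARBINARY, BIGINT);
        Page rowCountPage = rowPagesBuilder(inputTypes).row(4, null, null).build().get(0);
        Page fragmentPage = rowPagesBuilder(inputTypes).row(null, new byte[] { 1 }, null).build().get(0);
        Page statisticPage = rowPagesBuilder(inputTypes).row(null, null, 7).build().get(0);
        System.out.printf("%d / %d / %d positions%n",
                rowCountPage.getPositionCount(), fragmentPage.getPositionCount(), statisticPage.getPositionCount());
    }
}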