Use of io.prestosql.spi.block.BlockBuilder in the hetu-core project by openLooKeng.
The class TestColumnarArray, method test:
@Test
public void test() {
Slice[][] expectedValues = new Slice[ARRAY_SIZES.length][];
for (int i = 0; i < ARRAY_SIZES.length; i++) {
expectedValues[i] = new Slice[ARRAY_SIZES[i]];
for (int j = 0; j < ARRAY_SIZES[i]; j++) {
if (j % 3 != 1) {
expectedValues[i][j] = Slices.utf8Slice(format("%d.%d", i, j));
}
}
}
BlockBuilder blockBuilder = createBlockBuilderWithValues(expectedValues);
verifyBlock(blockBuilder, expectedValues);
verifyBlock(blockBuilder.build(), expectedValues);
Slice[][] expectedValuesWithNull = alternatingNullValues(expectedValues);
BlockBuilder blockBuilderWithNull = createBlockBuilderWithValues(expectedValuesWithNull);
verifyBlock(blockBuilderWithNull, expectedValuesWithNull);
verifyBlock(blockBuilderWithNull.build(), expectedValuesWithNull);
}
Use of io.prestosql.spi.block.BlockBuilder in the hetu-core project by openLooKeng.
The class TestColumnarArray, method createBlockBuilderWithValues:
public static BlockBuilder createBlockBuilderWithValues(Slice[][] expectedValues) {
BlockBuilder blockBuilder = new ArrayBlockBuilder(VARCHAR, null, 100, 100);
for (Slice[] expectedValue : expectedValues) {
if (expectedValue == null) {
blockBuilder.appendNull();
} else {
BlockBuilder elementBlockBuilder = VARCHAR.createBlockBuilder(null, expectedValue.length);
for (Slice v : expectedValue) {
if (v == null) {
elementBlockBuilder.appendNull();
} else {
VARCHAR.writeSlice(elementBlockBuilder, v);
}
}
blockBuilder.appendStructure(elementBlockBuilder.build());
}
}
return blockBuilder;
}
Use of io.prestosql.spi.block.BlockBuilder in the hetu-core project by openLooKeng.
The class RowPageBuilder, method append:
private void append(int channel, Object element) {
BlockBuilder blockBuilder = builders.get(channel);
Type type = types.get(channel);
appendToBlockBuilder(type, element, blockBuilder);
}
Use of io.prestosql.spi.block.BlockBuilder in the hetu-core project by openLooKeng.
The class TestHashAggregationOperator, method testHashBuilderResizeLimit:
@Test(dataProvider = "hashEnabled", expectedExceptions = ExceededMemoryLimitException.class, expectedExceptionsMessageRegExp = "Query exceeded per-node user memory limit of 3MB.*")
public void testHashBuilderResizeLimit(boolean hashEnabled) {
BlockBuilder builder = VARCHAR.createBlockBuilder(null, 1, MAX_BLOCK_SIZE_IN_BYTES);
// this must be larger than MAX_BLOCK_SIZE_IN_BYTES, 64K
VARCHAR.writeSlice(builder, Slices.allocate(5_000_000));
builder.build();
List<Integer> hashChannels = Ints.asList(0);
RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, hashChannels, VARCHAR);
List<Page> input = rowPagesBuilder.addSequencePage(10, 100).addBlocksPage(builder.build()).addSequencePage(10, 100).build();
DriverContext driverContext = createTaskContext(executor, scheduledExecutor, TEST_SESSION, new DataSize(3, MEGABYTE)).addPipelineContext(0, true, true, false).addDriverContext();
HashAggregationOperatorFactory operatorFactory = new HashAggregationOperatorFactory(0, new PlanNodeId("test"), ImmutableList.of(VARCHAR), hashChannels, ImmutableList.of(), Step.SINGLE, ImmutableList.of(COUNT.bind(ImmutableList.of(0), Optional.empty())), rowPagesBuilder.getHashChannel(), Optional.empty(), 100_000, Optional.of(new DataSize(16, MEGABYTE)), joinCompiler, false);
toPages(operatorFactory, driverContext, input);
}
Use of io.prestosql.spi.block.BlockBuilder in the hetu-core project by openLooKeng.
The class TestHashAggregationOperator, method testHashBuilderResize:
@Test(dataProvider = "hashEnabledAndMemoryLimitForMergeValues")
public void testHashBuilderResize(boolean hashEnabled, boolean spillEnabled, boolean revokeMemoryWhenAddingPages, long memoryLimitForMerge, long memoryLimitForMergeWithMemory) {
BlockBuilder builder = VARCHAR.createBlockBuilder(null, 1, MAX_BLOCK_SIZE_IN_BYTES);
// this must be larger than MAX_BLOCK_SIZE_IN_BYTES, 64K
VARCHAR.writeSlice(builder, Slices.allocate(200_000));
builder.build();
List<Integer> hashChannels = Ints.asList(0);
RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, hashChannels, VARCHAR);
List<Page> input = rowPagesBuilder.addSequencePage(10, 100).addBlocksPage(builder.build()).addSequencePage(10, 100).build();
DriverContext driverContext = createDriverContext(memoryLimitForMerge);
HashAggregationOperatorFactory operatorFactory = new HashAggregationOperatorFactory(0, new PlanNodeId("test"), ImmutableList.of(VARCHAR), hashChannels, ImmutableList.of(), Step.SINGLE, false, ImmutableList.of(COUNT.bind(ImmutableList.of(0), Optional.empty())), rowPagesBuilder.getHashChannel(), Optional.empty(), 100_000, Optional.of(new DataSize(16, MEGABYTE)), spillEnabled, succinctBytes(memoryLimitForMerge), succinctBytes(memoryLimitForMergeWithMemory), spillerFactory, joinCompiler, false);
toPages(operatorFactory, driverContext, input, revokeMemoryWhenAddingPages);
}
Aggregations