Use of io.trino.RowPagesBuilder in project trino by trinodb.
From the class TestHashSemiJoinOperator, method testProbeAndBuildNulls.
@Test(dataProvider = "hashEnabledValues")
public void testProbeAndBuildNulls(boolean hashEnabled)
{
    DriverContext driverContext = taskContext.addPipelineContext(0, true, true, false).addDriverContext();

    // build
    OperatorContext operatorContext = driverContext.addOperatorContext(0, new PlanNodeId("test"), ValuesOperator.class.getSimpleName());
    List<Type> buildTypes = ImmutableList.of(BIGINT);
    RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, Ints.asList(0), buildTypes);
    Operator buildOperator = new ValuesOperator(operatorContext, rowPagesBuilder.row(0L).row(1L).row((Object) null).row(3L).build());
    SetBuilderOperatorFactory setBuilderOperatorFactory = new SetBuilderOperatorFactory(
            1, new PlanNodeId("test"), buildTypes.get(0), 0, rowPagesBuilder.getHashChannel(), 10, new JoinCompiler(typeOperators), blockTypeOperators);
    Operator setBuilderOperator = setBuilderOperatorFactory.createOperator(driverContext);
    Driver driver = Driver.createDriver(driverContext, buildOperator, setBuilderOperator);
    while (!driver.isFinished()) {
        driver.process();
    }

    // probe
    List<Type> probeTypes = ImmutableList.of(BIGINT);
    RowPagesBuilder rowPagesBuilderProbe = rowPagesBuilder(hashEnabled, Ints.asList(0), probeTypes);
    List<Page> probeInput = rowPagesBuilderProbe.row(0L).row((Object) null).row(1L).row(2L).build();
    Optional<Integer> probeHashChannel = hashEnabled ? Optional.of(probeTypes.size()) : Optional.empty();
    OperatorFactory joinOperatorFactory = HashSemiJoinOperator.createOperatorFactory(
            2, new PlanNodeId("test"), setBuilderOperatorFactory.getSetProvider(), rowPagesBuilderProbe.getTypes(), 0, probeHashChannel);

    // expected
    MaterializedResult expected = resultBuilder(driverContext.getSession(), concat(probeTypes, ImmutableList.of(BOOLEAN)))
            .row(0L, true)
            .row(null, null)
            .row(1L, true)
            .row(2L, null)
            .build();

    OperatorAssertion.assertOperatorEquals(joinOperatorFactory, driverContext, probeInput, expected, hashEnabled, ImmutableList.of(probeTypes.size()));
}
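The expected rows encode SQL's three-valued IN semantics: a probe value found in the build set yields true, a NULL probe yields NULL, and a probe value that is absent yields NULL (rather than false) whenever the build side contains a NULL. A small sketch of that logic, written as plain Java for illustration (my paraphrase of the SQL semantics the test asserts, not Trino's actual implementation):

    import java.util.Set;

    // Returns the semi-join match column for one probe value.
    static Boolean semiJoinResult(Long probe, Set<Long> buildValues, boolean buildHasNull)
    {
        if (probe == null) {
            return null; // NULL probe: result is unknown
        }
        if (buildValues.contains(probe)) {
            return true; // matched a build value
        }
        return buildHasNull ? null : false; // no match, but a build NULL makes it unknown
    }

    // Matching the expected output above (build values {0, 1, 3} plus a NULL):
    // semiJoinResult(0L, build, true) -> TRUE
    // semiJoinResult(2L, build, true) -> NULL
    // semiJoinResult(null, build, true) -> NULL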
Use of io.trino.RowPagesBuilder in project trino by trinodb.
From the class TestHashSemiJoinOperator, method testBuildSideNulls.
@Test(dataProvider = "hashEnabledValues")
public void testBuildSideNulls(boolean hashEnabled)
{
    DriverContext driverContext = taskContext.addPipelineContext(0, true, true, false).addDriverContext();

    // build
    OperatorContext operatorContext = driverContext.addOperatorContext(0, new PlanNodeId("test"), ValuesOperator.class.getSimpleName());
    List<Type> buildTypes = ImmutableList.of(BIGINT);
    RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, Ints.asList(0), buildTypes);
    Operator buildOperator = new ValuesOperator(operatorContext, rowPagesBuilder.row(0L).row(1L).row(2L).row(2L).row(3L).row((Object) null).build());
    SetBuilderOperatorFactory setBuilderOperatorFactory = new SetBuilderOperatorFactory(
            1, new PlanNodeId("test"), buildTypes.get(0), 0, rowPagesBuilder.getHashChannel(), 10, new JoinCompiler(typeOperators), blockTypeOperators);
    Operator setBuilderOperator = setBuilderOperatorFactory.createOperator(driverContext);
    Driver driver = Driver.createDriver(driverContext, buildOperator, setBuilderOperator);
    while (!driver.isFinished()) {
        driver.process();
    }

    // probe
    List<Type> probeTypes = ImmutableList.of(BIGINT);
    RowPagesBuilder rowPagesBuilderProbe = rowPagesBuilder(hashEnabled, Ints.asList(0), probeTypes);
    List<Page> probeInput = rowPagesBuilderProbe.addSequencePage(4, 1).build();
    Optional<Integer> probeHashChannel = hashEnabled ? Optional.of(probeTypes.size()) : Optional.empty();
    OperatorFactory joinOperatorFactory = HashSemiJoinOperator.createOperatorFactory(
            2, new PlanNodeId("test"), setBuilderOperatorFactory.getSetProvider(), rowPagesBuilderProbe.getTypes(), 0, probeHashChannel);

    // expected
    MaterializedResult expected = resultBuilder(driverContext.getSession(), concat(probeTypes, ImmutableList.of(BOOLEAN)))
            .row(1L, true)
            .row(2L, true)
            .row(3L, true)
            .row(4L, null)
            .build();

    OperatorAssertion.assertOperatorEquals(joinOperatorFactory, driverContext, probeInput, expected, hashEnabled, ImmutableList.of(probeTypes.size()));
}
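Two RowPagesBuilder details carry both tests above. First, addSequencePage(4, 1) builds the probe input as one page of four rows counting up from 1, matching the expected rows 1 through 4. Second, when hashEnabled is true the builder appends a precomputed BIGINT hash channel after the data channels, which is why the probe hash channel index is probeTypes.size(). A short sketch, assuming the same static imports as the tests above (rowPagesBuilder, Ints, ImmutableList, BIGINT); the channel-count reading is my interpretation of the API, not asserted by the original tests:

    // addSequencePage(4, 1) is equivalent to four explicit row(...) calls:
    List<Page> sequence = rowPagesBuilder(BIGINT).addSequencePage(4, 1).build();
    List<Page> explicit = rowPagesBuilder(BIGINT).row(1L).row(2L).row(3L).row(4L).build();

    // With hashing enabled, pages carry [value, precomputedHash], so the hash
    // channel index reported by getHashChannel() is types.size(), i.e. 1 here:
    RowPagesBuilder hashed = rowPagesBuilder(true, Ints.asList(0), ImmutableList.of(BIGINT));
    List<Page> hashedPages = hashed.row(1L).row(2L).build();
    // hashedPages.get(0).getChannelCount() == 2
    // hashed.getHashChannel()              == Optional.of(1)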
Use of io.trino.RowPagesBuilder in project trino by trinodb.
From the class TestHashAggregationOperator, method testMultiplePartialFlushes.
@Test(dataProvider = "hashEnabled")
public void testMultiplePartialFlushes(boolean hashEnabled)
        throws Exception
{
    List<Integer> hashChannels = Ints.asList(0);
    RowPagesBuilder rowPagesBuilder = rowPagesBuilder(hashEnabled, hashChannels, BIGINT);
    List<Page> input = rowPagesBuilder
            .addSequencePage(500, 0)
            .addSequencePage(500, 500)
            .addSequencePage(500, 1000)
            .addSequencePage(500, 1500)
            .build();
    HashAggregationOperatorFactory operatorFactory = new HashAggregationOperatorFactory(
            0,
            new PlanNodeId("test"),
            ImmutableList.of(BIGINT),
            hashChannels,
            ImmutableList.of(),
            PARTIAL,
            ImmutableList.of(LONG_MIN.createAggregatorFactory(PARTIAL, ImmutableList.of(0), OptionalInt.empty())),
            rowPagesBuilder.getHashChannel(),
            Optional.empty(),
            100_000,
            Optional.of(DataSize.of(1, KILOBYTE)),
            joinCompiler,
            blockTypeOperators,
            Optional.empty());
    DriverContext driverContext = createDriverContext(1024);

    try (Operator operator = operatorFactory.createOperator(driverContext)) {
        List<Page> expectedPages = rowPagesBuilder(BIGINT, BIGINT).addSequencePage(2000, 0, 0).build();
        MaterializedResult expected = resultBuilder(driverContext.getSession(), BIGINT, BIGINT)
                .pages(expectedPages)
                .build();

        Iterator<Page> inputIterator = input.iterator();

        // Fill up the aggregation
        while (operator.needsInput() && inputIterator.hasNext()) {
            operator.addInput(inputIterator.next());
        }
        assertThat(driverContext.getMemoryUsage()).isGreaterThan(0);

        // Drain the output (partial flush)
        List<Page> outputPages = new ArrayList<>();
        while (true) {
            Page output = operator.getOutput();
            if (output == null) {
                break;
            }
            outputPages.add(output);
        }

        // There should be some pages that were drained
        assertFalse(outputPages.isEmpty());

        // The operator needs input again, since this was only a partial flush
        assertTrue(operator.needsInput());

        // Now, drive the operator to completion
        outputPages.addAll(toPages(operator, inputIterator));

        MaterializedResult actual;
        if (hashEnabled) {
            // Drop the hash channel for all pages
            outputPages = dropChannel(outputPages, ImmutableList.of(1));
        }
        actual = toMaterializedResult(operator.getOperatorContext().getSession(), expected.getTypes(), outputPages);
        assertEquals(actual.getTypes(), expected.getTypes());
        assertEqualsIgnoreOrder(actual.getMaterializedRows(), expected.getMaterializedRows());
    }

    assertEquals(driverContext.getMemoryUsage(), 0);
    assertEquals(driverContext.getRevocableMemoryUsage(), 0);
}
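The drive pattern here is common to Trino operator tests: drain getOutput() until it returns null, then decide whether the operator wants more input; toPages(operator, inputIterator) from OperatorAssertion finishes the job. A hand-rolled equivalent might look roughly like the sketch below. The helper name drainToCompletion is mine, and the sketch assumes the operator eventually accepts more input once its output is drained (true for the partial aggregation above); Trino's real helper also handles blocked and unfinished states more carefully.

    // Sketch: feed remaining input, finish the operator, and collect all output.
    static List<Page> drainToCompletion(Operator operator, Iterator<Page> input)
    {
        List<Page> output = new ArrayList<>();
        while (input.hasNext()) {
            if (operator.needsInput()) {
                operator.addInput(input.next());
            }
            Page page = operator.getOutput();
            if (page != null) {
                output.add(page);
            }
        }
        // No more input: signal completion and drain whatever remains.
        operator.finish();
        while (!operator.isFinished()) {
            Page page = operator.getOutput();
            if (page != null) {
                output.add(page);
            }
        }
        return output;
    }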
Use of io.trino.RowPagesBuilder in project trino by trinodb.
From the class TestShardWriter, method testWriter.
@Test
public void testWriter()
        throws Exception
{
    List<Long> columnIds = ImmutableList.of(1L, 2L, 4L, 6L, 7L, 8L, 9L, 10L);
    ArrayType arrayType = new ArrayType(BIGINT);
    ArrayType arrayOfArrayType = new ArrayType(arrayType);
    Type mapType = TESTING_TYPE_MANAGER.getParameterizedType(StandardTypes.MAP, ImmutableList.of(
            TypeSignatureParameter.typeParameter(createVarcharType(10).getTypeSignature()),
            TypeSignatureParameter.typeParameter(BOOLEAN.getTypeSignature())));
    List<Type> columnTypes = ImmutableList.of(BIGINT, createVarcharType(10), VARBINARY, DOUBLE, BOOLEAN, arrayType, mapType, arrayOfArrayType);
    File file = directory.resolve(System.nanoTime() + ".orc").toFile();

    byte[] bytes1 = octets(0x00, 0xFE, 0xFF);
    byte[] bytes3 = octets(0x01, 0x02, 0x19, 0x80);

    RowPagesBuilder rowPagesBuilder = RowPagesBuilder.rowPagesBuilder(columnTypes)
            .row(123L, "hello", wrappedBuffer(bytes1), 123.456, true, arrayBlockOf(BIGINT, 1, 2), mapBlockOf(createVarcharType(5), BOOLEAN, "k1", true), arrayBlockOf(arrayType, arrayBlockOf(BIGINT, 5)))
            .row(null, "world", null, Double.POSITIVE_INFINITY, null, arrayBlockOf(BIGINT, 3, null), mapBlockOf(createVarcharType(5), BOOLEAN, "k2", null), arrayBlockOf(arrayType, null, arrayBlockOf(BIGINT, 6, 7)))
            .row(456L, "bye \u2603", wrappedBuffer(bytes3), Double.NaN, false, arrayBlockOf(BIGINT), mapBlockOf(createVarcharType(5), BOOLEAN, "k3", false), arrayBlockOf(arrayType, arrayBlockOf(BIGINT)));

    try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(new EmptyClassLoader());
            OrcFileWriter writer = new OrcFileWriter(columnIds, columnTypes, file)) {
        writer.appendPages(rowPagesBuilder.build());
    }

    try (OrcDataSource dataSource = fileOrcDataSource(file)) {
        OrcRecordReader reader = createReader(dataSource, columnIds, columnTypes);
        assertEquals(reader.getReaderRowCount(), 3);
        assertEquals(reader.getReaderPosition(), 0);
        assertEquals(reader.getFileRowCount(), reader.getReaderRowCount());
        assertEquals(reader.getFilePosition(), reader.getReaderPosition());

        Page page = reader.nextPage();
        assertEquals(page.getPositionCount(), 3);
        assertEquals(reader.getReaderPosition(), 0);
        assertEquals(reader.getFilePosition(), reader.getReaderPosition());

        Block column0 = page.getBlock(0);
        assertEquals(column0.isNull(0), false);
        assertEquals(column0.isNull(1), true);
        assertEquals(column0.isNull(2), false);
        assertEquals(BIGINT.getLong(column0, 0), 123L);
        assertEquals(BIGINT.getLong(column0, 2), 456L);

        Block column1 = page.getBlock(1);
        assertEquals(createVarcharType(10).getSlice(column1, 0), utf8Slice("hello"));
        assertEquals(createVarcharType(10).getSlice(column1, 1), utf8Slice("world"));
        assertEquals(createVarcharType(10).getSlice(column1, 2), utf8Slice("bye \u2603"));

        Block column2 = page.getBlock(2);
        assertEquals(VARBINARY.getSlice(column2, 0), wrappedBuffer(bytes1));
        assertEquals(column2.isNull(1), true);
        assertEquals(VARBINARY.getSlice(column2, 2), wrappedBuffer(bytes3));

        Block column3 = page.getBlock(3);
        assertEquals(column3.isNull(0), false);
        assertEquals(column3.isNull(1), false);
        assertEquals(column3.isNull(2), false);
        assertEquals(DOUBLE.getDouble(column3, 0), 123.456);
        assertEquals(DOUBLE.getDouble(column3, 1), Double.POSITIVE_INFINITY);
        assertEquals(DOUBLE.getDouble(column3, 2), Double.NaN);

        Block column4 = page.getBlock(4);
        assertEquals(column4.isNull(0), false);
        assertEquals(column4.isNull(1), true);
        assertEquals(column4.isNull(2), false);
        assertEquals(BOOLEAN.getBoolean(column4, 0), true);
        assertEquals(BOOLEAN.getBoolean(column4, 2), false);

        Block column5 = page.getBlock(5);
        assertEquals(column5.getPositionCount(), 3);
        assertTrue(arrayBlocksEqual(BIGINT, arrayType.getObject(column5, 0), arrayBlockOf(BIGINT, 1, 2)));
        assertTrue(arrayBlocksEqual(BIGINT, arrayType.getObject(column5, 1), arrayBlockOf(BIGINT, 3, null)));
        assertTrue(arrayBlocksEqual(BIGINT, arrayType.getObject(column5, 2), arrayBlockOf(BIGINT)));

        Block column6 = page.getBlock(6);
        assertEquals(column6.getPositionCount(), 3);
        assertTrue(mapBlocksEqual(createVarcharType(5), BOOLEAN, arrayType.getObject(column6, 0), mapBlockOf(createVarcharType(5), BOOLEAN, "k1", true)));
        Block object = arrayType.getObject(column6, 1);
        Block k2 = mapBlockOf(createVarcharType(5), BOOLEAN, "k2", null);
        assertTrue(mapBlocksEqual(createVarcharType(5), BOOLEAN, object, k2));
        assertTrue(mapBlocksEqual(createVarcharType(5), BOOLEAN, arrayType.getObject(column6, 2), mapBlockOf(createVarcharType(5), BOOLEAN, "k3", false)));

        Block column7 = page.getBlock(7);
        assertEquals(column7.getPositionCount(), 3);
        assertTrue(arrayBlocksEqual(arrayType, arrayOfArrayType.getObject(column7, 0), arrayBlockOf(arrayType, arrayBlockOf(BIGINT, 5))));
        assertTrue(arrayBlocksEqual(arrayType, arrayOfArrayType.getObject(column7, 1), arrayBlockOf(arrayType, null, arrayBlockOf(BIGINT, 6, 7))));
        assertTrue(arrayBlocksEqual(arrayType, arrayOfArrayType.getObject(column7, 2), arrayBlockOf(arrayType, arrayBlockOf(BIGINT))));

        assertNull(reader.nextPage());
        assertEquals(reader.getReaderPosition(), 3);
        assertEquals(reader.getFilePosition(), reader.getReaderPosition());

        OrcFileMetadata orcFileMetadata = METADATA_CODEC.fromJson(reader.getUserMetadata().get(OrcFileMetadata.KEY).getBytes());
        assertEquals(orcFileMetadata, new OrcFileMetadata(ImmutableMap.<Long, TypeId>builder()
                .put(1L, BIGINT.getTypeId())
                .put(2L, createVarcharType(10).getTypeId())
                .put(4L, VARBINARY.getTypeId())
                .put(6L, DOUBLE.getTypeId())
                .put(7L, BOOLEAN.getTypeId())
                .put(8L, arrayType.getTypeId())
                .put(9L, mapType.getTypeId())
                .put(10L, arrayOfArrayType.getTypeId())
                .buildOrThrow()));
    }

    File crcFile = new File(file.getParentFile(), "." + file.getName() + ".crc");
    assertFalse(crcFile.exists());
}
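The octets(...) calls above build the VARBINARY test payloads, but the helper itself is not shown on this page. A plausible stand-in (hypothetical; written to match how the test uses it) simply narrows int literals into a byte array, which lets the test write values above 0x7F without explicit (byte) casts:

    // Hypothetical stand-in for the octets(...) helper used above:
    // octets(0x00, 0xFE, 0xFF) -> new byte[] {0x00, (byte) 0xFE, (byte) 0xFF}
    private static byte[] octets(int... values)
    {
        byte[] bytes = new byte[values.length];
        for (int i = 0; i < values.length; i++) {
            bytes[i] = (byte) values[i];
        }
        return bytes;
    }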