Use of io.trino.spi.block.LongArrayBlockBuilder in project trino by trinodb.
From the class TestLongArrayBlock, method createBlockBuilderWithValues:
private static BlockBuilder createBlockBuilderWithValues(Slice[] expectedValues) {
    LongArrayBlockBuilder blockBuilder = new LongArrayBlockBuilder(null, expectedValues.length);
    writeValues(expectedValues, blockBuilder);
    return blockBuilder;
}
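The writeValues helper referenced above is not part of this snippet. A minimal sketch of what such a helper might look like, assuming each non-null Slice carries the 8 bytes of one long value (the null handling and the getLong(0) read are assumptions, not necessarily the project's exact code):

private static void writeValues(Slice[] expectedValues, BlockBuilder blockBuilder) {
    for (Slice expectedValue : expectedValues) {
        if (expectedValue == null) {
            blockBuilder.appendNull();
        }
        else {
            // read the long stored at offset 0 of the slice and write it into the builder
            blockBuilder.writeLong(expectedValue.getLong(0));
            blockBuilder.closeEntry();
        }
    }
}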
Use of io.trino.spi.block.LongArrayBlockBuilder in project trino by trinodb.
From the class TestRawEncoderMapping, method testMapping:
@Test
public void testMapping() {
    EncoderColumnHandle col1 = new KafkaColumnHandle("test1", BIGINT, "0", "LONG", null, false, false, false);
    EncoderColumnHandle col2 = new KafkaColumnHandle("test2", createUnboundedVarcharType(), "8:14", "BYTE", null, false, false, false);
    EncoderColumnHandle col3 = new KafkaColumnHandle("test3", BIGINT, "14", "LONG", null, false, false, false);
    EncoderColumnHandle col4 = new KafkaColumnHandle("test4", createUnboundedVarcharType(), "22:28", "BYTE", null, false, false, false);
    EncoderColumnHandle col5 = new KafkaColumnHandle("test5", BIGINT, "28", "LONG", null, false, false, false);
    EncoderColumnHandle col6 = new KafkaColumnHandle("test6", createVarcharType(6), "36:42", "BYTE", null, false, false, false);
    EncoderColumnHandle col7 = new KafkaColumnHandle("test7", createVarcharType(6), "42:48", "BYTE", null, false, false, false);
    RowEncoder rowEncoder = ENCODER_FACTORY.create(TestingConnectorSession.SESSION, Optional.empty(), ImmutableList.of(col1, col2, col3, col4, col5, col6, col7));

    ByteBuffer buf = ByteBuffer.allocate(48);
    buf.putLong(123456789);                             // bytes 0-8
    buf.put("abcdef".getBytes(StandardCharsets.UTF_8)); // bytes 8-14
    buf.putLong(123456789);                             // bytes 14-22
    buf.put("abcdef".getBytes(StandardCharsets.UTF_8)); // bytes 22-28
    buf.putLong(123456789);                             // bytes 28-36
    buf.put("abcdef".getBytes(StandardCharsets.UTF_8)); // bytes 36-42
    buf.put("abcdef".getBytes(StandardCharsets.UTF_8)); // bytes 42-48

    Block longArrayBlock = new LongArrayBlockBuilder(null, 1).writeLong(123456789).closeEntry().build();
    Block varArrayBlock = new VariableWidthBlockBuilder(null, 1, 6).writeBytes(Slices.wrappedBuffer("abcdef".getBytes(StandardCharsets.UTF_8)), 0, 6).closeEntry().build();

    rowEncoder.appendColumnValue(longArrayBlock, 0);
    rowEncoder.appendColumnValue(varArrayBlock, 0);
    rowEncoder.appendColumnValue(longArrayBlock, 0);
    rowEncoder.appendColumnValue(varArrayBlock, 0);
    rowEncoder.appendColumnValue(longArrayBlock, 0);
    rowEncoder.appendColumnValue(varArrayBlock, 0);
    rowEncoder.appendColumnValue(varArrayBlock, 0);

    assertEquals(buf.array(), rowEncoder.toByteArray());
}
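The expected byte image above is a fixed-width concatenation of 8-byte longs and 6-byte UTF-8 strings, which is what the "0", "8:14", "14", ... offset mappings describe. A standalone sketch of reading such a layout back with plain java.nio, useful for checking the offsets by hand (class and variable names are illustrative, not part of the Trino test):

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class RawLayoutCheck {
    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(14);
        buf.putLong(123456789);                             // bytes 0-8: a BIGINT field
        buf.put("abcdef".getBytes(StandardCharsets.UTF_8)); // bytes 8-14: a 6-byte VARCHAR field

        ByteBuffer reader = ByteBuffer.wrap(buf.array());
        long longField = reader.getLong();                  // 123456789
        byte[] textField = new byte[6];
        reader.get(textField);                              // "abcdef"
        System.out.println(longField + " " + new String(textField, StandardCharsets.UTF_8));
    }
}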
Use of io.trino.spi.block.LongArrayBlockBuilder in project trino by trinodb.
From the class TestDoubleType, method testNaNHash:
@Test
public void testNaNHash() {
    BlockBuilder blockBuilder = new LongArrayBlockBuilder(null, 4);
    blockBuilder.writeLong(doubleToLongBits(Double.NaN));
    blockBuilder.writeLong(doubleToRawLongBits(Double.NaN));
    // the following two are long bit patterns of a double NaN
    blockBuilder.writeLong(-0x000fffffffffffffL);
    blockBuilder.writeLong(0x7ff8000000000000L);

    BlockPositionHashCode hashCodeOperator = blockTypeOperators.getHashCodeOperator(DOUBLE);
    assertEquals(hashCodeOperator.hashCode(blockBuilder, 0), hashCodeOperator.hashCode(blockBuilder, 1));
    assertEquals(hashCodeOperator.hashCode(blockBuilder, 0), hashCodeOperator.hashCode(blockBuilder, 2));
    assertEquals(hashCodeOperator.hashCode(blockBuilder, 0), hashCodeOperator.hashCode(blockBuilder, 3));

    BlockPositionXxHash64 xxHash64Operator = blockTypeOperators.getXxHash64Operator(DOUBLE);
    assertEquals(xxHash64Operator.xxHash64(blockBuilder, 0), xxHash64Operator.xxHash64(blockBuilder, 1));
    assertEquals(xxHash64Operator.xxHash64(blockBuilder, 0), xxHash64Operator.xxHash64(blockBuilder, 2));
    assertEquals(xxHash64Operator.xxHash64(blockBuilder, 0), xxHash64Operator.xxHash64(blockBuilder, 3));
}
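All four longs written above are bit patterns of a double NaN, and Double.doubleToLongBits canonicalizes every NaN to the same bits (0x7ff8000000000000L); that is why DOUBLE's hash and xxHash64 operators must agree across the four positions. A quick standalone check of that property (not part of the Trino test):

import static java.lang.Double.doubleToLongBits;
import static java.lang.Double.doubleToRawLongBits;
import static java.lang.Double.longBitsToDouble;

public class NanBitPatterns {
    public static void main(String[] args) {
        long[] patterns = {
                doubleToLongBits(Double.NaN),
                doubleToRawLongBits(Double.NaN),
                -0x000fffffffffffffL,
                0x7ff8000000000000L};
        for (long bits : patterns) {
            // every pattern decodes to NaN and canonicalizes to the same 0x7ff8000000000000 bits
            System.out.printf("%016x -> isNaN=%s, canonical=%016x%n",
                    bits, Double.isNaN(longBitsToDouble(bits)), doubleToLongBits(longBitsToDouble(bits)));
        }
    }
}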
Use of io.trino.spi.block.LongArrayBlockBuilder in project trino by trinodb.
From the class TestTableFinishOperator, method testStatisticsAggregation:
@Test
public void testStatisticsAggregation() throws Exception {
    TestTableFinisher tableFinisher = new TestTableFinisher();
    ColumnStatisticMetadata statisticMetadata = new ColumnStatisticMetadata("column", MAX_VALUE);
    StatisticAggregationsDescriptor<Integer> descriptor = new StatisticAggregationsDescriptor<>(
            ImmutableMap.of(),
            ImmutableMap.of(),
            ImmutableMap.of(statisticMetadata, 0));
    Session session = testSessionBuilder().setSystemProperty("statistics_cpu_timer_enabled", "true").build();
    TableExecuteContextManager tableExecuteContextManager = new TableExecuteContextManager();
    TableFinishOperatorFactory operatorFactory = new TableFinishOperatorFactory(
            0,
            new PlanNodeId("node"),
            tableFinisher,
            new AggregationOperator.AggregationOperatorFactory(
                    1,
                    new PlanNodeId("test"),
                    ImmutableList.of(LONG_MAX.createAggregatorFactory(SINGLE, ImmutableList.of(2), OptionalInt.empty()))),
            descriptor,
            tableExecuteContextManager,
            true,
            session);
    DriverContext driverContext = createTaskContext(scheduledExecutor, scheduledExecutor, session)
            .addPipelineContext(0, true, true, false)
            .addDriverContext();
    tableExecuteContextManager.registerTableExecuteContextForQuery(driverContext.getPipelineContext().getTaskContext().getQueryContext().getQueryId());
    TableFinishOperator operator = (TableFinishOperator) operatorFactory.createOperator(driverContext);

    List<Type> inputTypes = ImmutableList.of(BIGINT, VARBINARY, BIGINT);
    operator.addInput(rowPagesBuilder(inputTypes).row(4, null, null).build().get(0));
    operator.addInput(rowPagesBuilder(inputTypes).row(5, null, null).build().get(0));
    operator.addInput(rowPagesBuilder(inputTypes).row(null, new byte[] { 1 }, null).build().get(0));
    operator.addInput(rowPagesBuilder(inputTypes).row(null, new byte[] { 2 }, null).build().get(0));
    operator.addInput(rowPagesBuilder(inputTypes).row(null, null, 6).build().get(0));
    operator.addInput(rowPagesBuilder(inputTypes).row(null, null, 7).build().get(0));

    assertThat(driverContext.getMemoryUsage()).as("memoryUsage").isGreaterThan(0);
    assertTrue(operator.isBlocked().isDone(), "isBlocked should be done");
    assertTrue(operator.needsInput(), "needsInput should be true");

    operator.finish();
    assertFalse(operator.isFinished(), "isFinished should be false");
    assertNull(operator.getOutput());

    List<Type> outputTypes = ImmutableList.of(BIGINT);
    assertPageEquals(outputTypes, operator.getOutput(), rowPagesBuilder(outputTypes).row(9).build().get(0));

    assertTrue(operator.isBlocked().isDone(), "isBlocked should be done");
    assertFalse(operator.needsInput(), "needsInput should be false");
    assertTrue(operator.isFinished(), "isFinished should be true");

    operator.close();
    assertEquals(tableFinisher.getFragments(), ImmutableList.of(Slices.wrappedBuffer(new byte[] { 1 }), Slices.wrappedBuffer(new byte[] { 2 })));
    assertEquals(tableFinisher.getComputedStatistics().size(), 1);
    assertEquals(getOnlyElement(tableFinisher.getComputedStatistics()).getColumnStatistics().size(), 1);

    Block expectedStatisticsBlock = new LongArrayBlockBuilder(null, 1).writeLong(7).closeEntry().build();
    assertBlockEquals(BIGINT, getOnlyElement(tableFinisher.getComputedStatistics()).getColumnStatistics().get(statisticMetadata), expectedStatisticsBlock);
    assertEquals(driverContext.getMemoryUsage(), 0, "memoryUsage");
}
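The statistics channel (column index 2, wired to LONG_MAX in the operator factory) only ever sees the values 6 and 7, so the computed MAX_VALUE statistic is 7, which is exactly what the single-position expected block encodes. A small sketch of reading that value back out of such a block with the standard Type accessor (illustrative, not part of the test):

Block expected = new LongArrayBlockBuilder(null, 1).writeLong(7).closeEntry().build();
// a LongArrayBlock holds one long per position; BIGINT reads it back directly
long maxValue = BIGINT.getLong(expected, 0); // 7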
Use of io.trino.spi.block.LongArrayBlockBuilder in project trino by trinodb.
From the class TestRunLengthEncodedBlock, method testBuildingFromLongArrayBlockBuilder:
@Test
public void testBuildingFromLongArrayBlockBuilder() {
    LongArrayBlockBuilder blockBuilder = new LongArrayBlockBuilder(null, 100);
    populateNullValues(blockBuilder, 100);
    assertEquals(blockBuilder.build().getEncodingName(), RunLengthBlockEncoding.NAME);
}
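populateNullValues is not shown in this snippet. A minimal sketch of such a helper, assuming it simply appends the requested number of nulls (the body is an assumption):

private static void populateNullValues(BlockBuilder blockBuilder, int positionCount) {
    for (int i = 0; i < positionCount; i++) {
        // a builder that only ever receives nulls carries no per-position data worth keeping
        blockBuilder.appendNull();
    }
}

Because every position is null, the built block can be represented as a single repeated value, which is why the getEncodingName() assertion expects RunLengthBlockEncoding.NAME.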