Example usage of io.trino.spi.type.TypeOperators in the trinodb/trino project: class TestDynamicFilterSourceOperator, method setUp.
@BeforeMethod
public void setUp() {
    // Shared prefix for the daemon thread names of both executors
    String threadNameBase = getClass().getSimpleName();
    blockTypeOperators = new BlockTypeOperators(new TypeOperators());
    executor = newCachedThreadPool(daemonThreadsNamed(threadNameBase + "-%s"));
    scheduledExecutor = newScheduledThreadPool(2, daemonThreadsNamed(threadNameBase + "-scheduledExecutor-%s"));
    // Pipeline 0, with both input and output pipelines enabled, not partitioned
    pipelineContext = createTaskContext(executor, scheduledExecutor, TEST_SESSION).addPipelineContext(0, true, true, false);
    partitions = ImmutableList.builder();
}
Example usage of io.trino.spi.type.TypeOperators in the trinodb/trino project: class TestSetDigest, method testHashCounts.
@Test
public void testHashCounts() {
    // digest1 sees the hash of 0 twice and the hash of 1 once
    SetDigest digest1 = new SetDigest();
    digest1.add(0);
    digest1.add(0);
    digest1.add(1);

    // digest2 sees the hash of 0 twice and the hash of 2 twice
    SetDigest digest2 = new SetDigest();
    digest2.add(0);
    digest2.add(0);
    digest2.add(2);
    digest2.add(2);

    // hashCounts produces a map from hash (BIGINT) to occurrence count (SMALLINT)
    MapType mapType = new MapType(BIGINT, SMALLINT, new TypeOperators());

    Block block = hashCounts(mapType, digest1.serialize());
    assertTrue(block instanceof SingleMapBlock);
    // Counts for digest1: {2 (for value 0), 1 (for value 1)}
    assertEquals(extractCountValues(block), ImmutableSet.of((short) 1, (short) 2));

    // After merging, value 0 was seen 2 + 2 = 4 times, value 1 once, value 2 twice
    digest1.mergeWith(digest2);
    block = hashCounts(mapType, digest1.serialize());
    assertTrue(block instanceof SingleMapBlock);
    assertEquals(extractCountValues(block), ImmutableSet.of((short) 1, (short) 2, (short) 4));
}

/**
 * Collects the SMALLINT count values from a flattened key/value map block.
 * Values sit at the odd positions (keys occupy the even positions).
 */
private static Set<Short> extractCountValues(Block block) {
    Set<Short> values = new HashSet<>();
    for (int i = 1; i < block.getPositionCount(); i += 2) {
        values.add(block.getShort(i, 0));
    }
    return values;
}
Example usage of io.trino.spi.type.TypeOperators in the trinodb/trino project: class TestDeltaLakeSchemaSupport, method testSerializeSchemaAsJson.
@Test
public void testSerializeSchemaAsJson() throws Exception {
    TypeOperators typeOperators = new TypeOperators();

    // arr: array(array(integer))
    DeltaLakeColumnHandle arrayColumn = new DeltaLakeColumnHandle("arr", new ArrayType(new ArrayType(INTEGER)), REGULAR);

    // str: row(s1 varchar, s2 row(i1 integer, d2 decimal(38, 0)))
    RowType innerRow = RowType.from(ImmutableList.of(
            new RowType.Field(Optional.of("i1"), INTEGER),
            new RowType.Field(Optional.of("d2"), DecimalType.createDecimalType(38, 0))));
    RowType outerRow = RowType.from(ImmutableList.of(
            new RowType.Field(Optional.of("s1"), VarcharType.createUnboundedVarcharType()),
            new RowType.Field(Optional.of("s2"), innerRow)));
    DeltaLakeColumnHandle structColumn = new DeltaLakeColumnHandle("str", outerRow, REGULAR);

    // m: map(integer, map(integer, integer))
    DeltaLakeColumnHandle mapColumn = new DeltaLakeColumnHandle("m", new MapType(INTEGER, new MapType(INTEGER, INTEGER, typeOperators), typeOperators), REGULAR);

    String jsonEncoding = serializeSchemaAsJson(ImmutableList.of(arrayColumn, structColumn, mapColumn));

    // Compare as parsed JSON trees so field ordering and formatting don't matter
    URL expected = getResource("io/trino/plugin/deltalake/transactionlog/schema/nested_schema.json");
    ObjectMapper objectMapper = new ObjectMapper();
    assertEquals(objectMapper.readTree(jsonEncoding), objectMapper.readTree(expected));
}
Example usage of io.trino.spi.type.TypeOperators in the trinodb/trino project: class TestGlobalFunctionCatalog, method testDuplicateFunctions.
@Test
public void testDuplicateFunctions() {
    TypeOperators typeOperators = new TypeOperators();
    GlobalFunctionCatalog catalog = new GlobalFunctionCatalog();
    // Register the built-in system functions first
    catalog.addFunctions(SystemFunctionBundle.create(new FeaturesConfig(), typeOperators, new BlockTypeOperators(typeOperators), NodeVersion.UNKNOWN));

    // First registration of the custom bundle succeeds
    FunctionBundle customBundle = extractFunctions(CustomAdd.class);
    catalog.addFunctions(customBundle);

    // Registering the identical bundle again must be rejected with a clear message
    assertThatThrownBy(() -> catalog.addFunctions(customBundle))
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessageMatching("\\QFunction already registered: custom_add(bigint,bigint):bigint\\E");
}
Example usage of io.trino.spi.type.TypeOperators in the trinodb/trino project: class TestTypedHistogram, method testMassive.
@Test
public void testMassive() {
    // Write each value v in [1, 2000) exactly v times into a BIGINT block
    BlockBuilder inputBuilder = BIGINT.createBlockBuilder(null, 5000);
    for (int value = 1; value < 2000; value++) {
        for (int repeat = 0; repeat < value; repeat++) {
            BIGINT.writeLong(inputBuilder, value);
        }
    }
    Block inputBlock = inputBuilder.build();

    BlockTypeOperators operators = new BlockTypeOperators(new TypeOperators());
    // Start with a capacity (1000) below the distinct-value count to force growth
    TypedHistogram histogram = new SingleTypedHistogram(BIGINT, operators.getEqualOperator(BIGINT), operators.getHashCodeOperator(BIGINT), 1000);
    for (int position = 0; position < inputBlock.getPositionCount(); position++) {
        histogram.add(position, inputBlock, 1);
    }

    // Serialize to a map block and verify each value's count equals the value itself
    MapType mapType = mapType(BIGINT, BIGINT);
    BlockBuilder out = mapType.createBlockBuilder(null, 1);
    histogram.serialize(out);
    Block outputBlock = mapType.getObject(out, 0);
    for (int position = 0; position < outputBlock.getPositionCount(); position += 2) {
        assertEquals(BIGINT.getLong(outputBlock, position + 1), BIGINT.getLong(outputBlock, position));
    }
}
Aggregations