Search in sources:

Example 1 with KafkaColumnHandle

Use of io.trino.plugin.kafka.KafkaColumnHandle in project trino by trinodb.

From the class TestRawEncoderMapping, method testMapping:

@Test
public void testMapping() {
    // KafkaColumnHandle(name, type, mapping, dataFormat, formatHint, keyCodec, hidden, internal);
    // for the raw format, mapping is a single byte offset ("0") for fixed-width LONG fields,
    // or a start:end byte range ("8:14") for BYTE fields
    EncoderColumnHandle col1 = new KafkaColumnHandle("test1", BIGINT, "0", "LONG", null, false, false, false);
    EncoderColumnHandle col2 = new KafkaColumnHandle("test2", createUnboundedVarcharType(), "8:14", "BYTE", null, false, false, false);
    EncoderColumnHandle col3 = new KafkaColumnHandle("test3", BIGINT, "14", "LONG", null, false, false, false);
    EncoderColumnHandle col4 = new KafkaColumnHandle("test4", createUnboundedVarcharType(), "22:28", "BYTE", null, false, false, false);
    EncoderColumnHandle col5 = new KafkaColumnHandle("test5", BIGINT, "28", "LONG", null, false, false, false);
    EncoderColumnHandle col6 = new KafkaColumnHandle("test6", createVarcharType(6), "36:42", "BYTE", null, false, false, false);
    EncoderColumnHandle col7 = new KafkaColumnHandle("test7", createVarcharType(6), "42:48", "BYTE", null, false, false, false);
    RowEncoder rowEncoder = ENCODER_FACTORY.create(TestingConnectorSession.SESSION, Optional.empty(), ImmutableList.of(col1, col2, col3, col4, col5, col6, col7));
    ByteBuffer buf = ByteBuffer.allocate(48);
    // 0-8
    buf.putLong(123456789);
    // 8-14
    buf.put("abcdef".getBytes(StandardCharsets.UTF_8));
    // 14-22
    buf.putLong(123456789);
    // 22-28
    buf.put("abcdef".getBytes(StandardCharsets.UTF_8));
    // 28-36
    buf.putLong(123456789);
    // 36-42
    buf.put("abcdef".getBytes(StandardCharsets.UTF_8));
    // 42-48
    buf.put("abcdef".getBytes(StandardCharsets.UTF_8));
    // single-position blocks, reused for every column appended below
    Block longArrayBlock = new LongArrayBlockBuilder(null, 1).writeLong(123456789).closeEntry().build();
    Block varArrayBlock = new VariableWidthBlockBuilder(null, 1, 6).writeBytes(Slices.wrappedBuffer("abcdef".getBytes(StandardCharsets.UTF_8)), 0, 6).closeEntry().build();
    rowEncoder.appendColumnValue(longArrayBlock, 0);
    rowEncoder.appendColumnValue(varArrayBlock, 0);
    rowEncoder.appendColumnValue(longArrayBlock, 0);
    rowEncoder.appendColumnValue(varArrayBlock, 0);
    rowEncoder.appendColumnValue(longArrayBlock, 0);
    rowEncoder.appendColumnValue(varArrayBlock, 0);
    rowEncoder.appendColumnValue(varArrayBlock, 0);
    assertEquals(buf.array(), rowEncoder.toByteArray());
}
Also used: VariableWidthBlockBuilder (io.trino.spi.block.VariableWidthBlockBuilder), EncoderColumnHandle (io.trino.plugin.kafka.encoder.EncoderColumnHandle), KafkaColumnHandle (io.trino.plugin.kafka.KafkaColumnHandle), Block (io.trino.spi.block.Block), RowEncoder (io.trino.plugin.kafka.encoder.RowEncoder), LongArrayBlockBuilder (io.trino.spi.block.LongArrayBlockBuilder), ByteBuffer (java.nio.ByteBuffer), Test (org.testng.annotations.Test)
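
For reference, the 48-byte layout asserted above can be checked by reading the array back with a plain ByteBuffer. This is a minimal sketch, not part of the original test; it reads the expected buffer, which the assertEquals above (TestNG order: actual first, then expected) shows matches the encoder output:

    ByteBuffer decoded = ByteBuffer.wrap(buf.array());
    // col1: LONG at offset 0
    assertEquals(decoded.getLong(0), 123456789L);
    // col2: BYTE range 8:14
    byte[] text = new byte[6];
    decoded.position(8);
    decoded.get(text);
    assertEquals(new String(text, StandardCharsets.UTF_8), "abcdef");
    // col3: LONG at offset 14
    assertEquals(decoded.getLong(14), 123456789L);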

Example 2 with KafkaColumnHandle

Use of io.trino.plugin.kafka.KafkaColumnHandle in project trino by trinodb.

From the class TestAvroConfluentRowDecoder, method testSingleValueRow:

@Test
public void testSingleValueRow() throws Exception {
    MockSchemaRegistryClient mockSchemaRegistryClient = new MockSchemaRegistryClient();
    Schema schema = Schema.create(Schema.Type.LONG);
    mockSchemaRegistryClient.register(format("%s-key", TOPIC), schema);
    Set<DecoderColumnHandle> columnHandles = ImmutableSet.<DecoderColumnHandle>builder()
            .add(new KafkaColumnHandle("col1", BIGINT, "col1", null, null, false, false, false))
            .build();
    RowDecoder rowDecoder = getRowDecoder(mockSchemaRegistryClient, columnHandles, schema);
    testSingleValueRow(rowDecoder, 3L, schema, 1);
}
Also used: MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient), Schema (org.apache.avro.Schema), KafkaColumnHandle (io.trino.plugin.kafka.KafkaColumnHandle), DecoderColumnHandle (io.trino.decoder.DecoderColumnHandle), RowDecoder (io.trino.decoder.RowDecoder), Test (org.testng.annotations.Test)
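
The decoder resolves the writer schema through the registry using the Confluent wire format: one zero magic byte, a four-byte big-endian schema ID, then the Avro binary payload. A minimal sketch of hand-assembling such a message for the single LONG value, assuming the ID is the one returned by the mock registry's register call (this is an illustration, not the test's actual serialization helper):

    int schemaId = mockSchemaRegistryClient.register(format("%s-key", TOPIC), schema);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // magic byte
    out.write(0);
    // 4-byte big-endian schema ID
    out.write(ByteBuffer.allocate(4).putInt(schemaId).array());
    // Avro binary payload: the long value 3, as passed to testSingleValueRow
    BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
    new GenericDatumWriter<Long>(schema).write(3L, encoder);
    encoder.flush();
    byte[] message = out.toByteArray();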

Example 3 with KafkaColumnHandle

Use of io.trino.plugin.kafka.KafkaColumnHandle in project trino by trinodb.

From the class TestAvroConfluentRowDecoder, method testDecodingRows:

@Test
public void testDecodingRows() throws Exception {
    MockSchemaRegistryClient mockSchemaRegistryClient = new MockSchemaRegistryClient();
    Schema initialSchema = SchemaBuilder.record(TOPIC).fields()
            .name("col1").type().intType().noDefault()
            .name("col2").type().stringType().noDefault()
            .name("col3").type().intType().intDefault(42)
            .name("col4").type().nullable().intType().noDefault()
            .name("col5").type().nullable().bytesType().noDefault()
            .endRecord();
    Schema evolvedSchema = SchemaBuilder.record(TOPIC).fields()
            .name("col1").type().intType().noDefault()
            .name("col2").type().stringType().noDefault()
            .name("col3").type().intType().intDefault(3)
            .name("col4").type().nullable().intType().noDefault()
            .name("col5").type().nullable().bytesType().noDefault()
            .name("col6").type().optional().longType()
            .endRecord();
    mockSchemaRegistryClient.register(TOPIC + "-value", initialSchema);
    mockSchemaRegistryClient.register(TOPIC + "-value", evolvedSchema);
    Set<DecoderColumnHandle> columnHandles = ImmutableSet.<DecoderColumnHandle>builder()
            .add(new KafkaColumnHandle("col1", INTEGER, "col1", null, null, false, false, false))
            .add(new KafkaColumnHandle("col2", VARCHAR, "col2", null, null, false, false, false))
            .add(new KafkaColumnHandle("col3", INTEGER, "col3", null, null, false, false, false))
            .add(new KafkaColumnHandle("col4", INTEGER, "col4", null, null, false, false, false))
            .add(new KafkaColumnHandle("col5", VARBINARY, "col5", null, null, false, false, false))
            .add(new KafkaColumnHandle("col6", BIGINT, "col6", null, null, false, false, false))
            .build();
    RowDecoder rowDecoder = getRowDecoder(mockSchemaRegistryClient, columnHandles, evolvedSchema);
    testRow(rowDecoder, generateRecord(initialSchema, Arrays.asList(3, "string-3", 30, 300, ByteBuffer.wrap(new byte[] { 1, 2, 3 }))), 1);
    testRow(rowDecoder, generateRecord(initialSchema, Arrays.asList(3, "", 30, null, null)), 1);
    testRow(rowDecoder, generateRecord(initialSchema, Arrays.asList(3, "\u0394\u66f4\u6539", 30, null, ByteBuffer.wrap(new byte[] { 1, 2, 3 }))), 1);
    testRow(rowDecoder, generateRecord(evolvedSchema, Arrays.asList(4, "string-4", 40, 400, null, 4L)), 2);
    testRow(rowDecoder, generateRecord(evolvedSchema, Arrays.asList(5, "string-5", 50, 500, ByteBuffer.wrap(new byte[] { 1, 2, 3 }), null)), 2);
}
Also used: MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient), Schema (org.apache.avro.Schema), KafkaColumnHandle (io.trino.plugin.kafka.KafkaColumnHandle), DecoderColumnHandle (io.trino.decoder.DecoderColumnHandle), RowDecoder (io.trino.decoder.RowDecoder), Test (org.testng.annotations.Test)
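
generateRecord is a helper of the test class that is not shown in this snippet. A plausible minimal sketch using Avro's GenericRecordBuilder, assuming the helper simply zips the value list with the schema's fields in order (the real implementation may differ):

private static GenericRecord generateRecord(Schema schema, List<Object> values)
{
    GenericRecordBuilder record = new GenericRecordBuilder(schema);
    List<Schema.Field> fields = schema.getFields();
    for (int i = 0; i < values.size(); i++) {
        // positional mapping of values to fields is an assumption of this sketch
        record.set(fields.get(i), values.get(i));
    }
    return record.build();
}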

Aggregations

KafkaColumnHandle (io.trino.plugin.kafka.KafkaColumnHandle): 3
Test (org.testng.annotations.Test): 3
MockSchemaRegistryClient (io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient): 2
DecoderColumnHandle (io.trino.decoder.DecoderColumnHandle): 2
RowDecoder (io.trino.decoder.RowDecoder): 2
Schema (org.apache.avro.Schema): 2
EncoderColumnHandle (io.trino.plugin.kafka.encoder.EncoderColumnHandle): 1
RowEncoder (io.trino.plugin.kafka.encoder.RowEncoder): 1
Block (io.trino.spi.block.Block): 1
LongArrayBlockBuilder (io.trino.spi.block.LongArrayBlockBuilder): 1
VariableWidthBlockBuilder (io.trino.spi.block.VariableWidthBlockBuilder): 1
ByteBuffer (java.nio.ByteBuffer): 1