Use of io.trino.plugin.kafka.KafkaColumnHandle in project trino by trinodb.
From the class TestRawEncoderMapping, method testMapping:
@Test
public void testMapping()
{
    EncoderColumnHandle col1 = new KafkaColumnHandle("test1", BIGINT, "0", "LONG", null, false, false, false);
    EncoderColumnHandle col2 = new KafkaColumnHandle("test2", createUnboundedVarcharType(), "8:14", "BYTE", null, false, false, false);
    EncoderColumnHandle col3 = new KafkaColumnHandle("test3", BIGINT, "14", "LONG", null, false, false, false);
    EncoderColumnHandle col4 = new KafkaColumnHandle("test4", createUnboundedVarcharType(), "22:28", "BYTE", null, false, false, false);
    EncoderColumnHandle col5 = new KafkaColumnHandle("test5", BIGINT, "28", "LONG", null, false, false, false);
    EncoderColumnHandle col6 = new KafkaColumnHandle("test6", createVarcharType(6), "36:42", "BYTE", null, false, false, false);
    EncoderColumnHandle col7 = new KafkaColumnHandle("test7", createVarcharType(6), "42:48", "BYTE", null, false, false, false);

    RowEncoder rowEncoder = ENCODER_FACTORY.create(TestingConnectorSession.SESSION, Optional.empty(), ImmutableList.of(col1, col2, col3, col4, col5, col6, col7));

    ByteBuffer buf = ByteBuffer.allocate(48);
    // col1: bytes 0-8
    buf.putLong(123456789);
    // col2: bytes 8-14
    buf.put("abcdef".getBytes(StandardCharsets.UTF_8));
    // col3: bytes 14-22
    buf.putLong(123456789);
    // col4: bytes 22-28
    buf.put("abcdef".getBytes(StandardCharsets.UTF_8));
    // col5: bytes 28-36
    buf.putLong(123456789);
    // col6: bytes 36-42
    buf.put("abcdef".getBytes(StandardCharsets.UTF_8));
    // col7: bytes 42-48
    buf.put("abcdef".getBytes(StandardCharsets.UTF_8));

    Block longArrayBlock = new LongArrayBlockBuilder(null, 1).writeLong(123456789).closeEntry().build();
    Block varArrayBlock = new VariableWidthBlockBuilder(null, 1, 6).writeBytes(Slices.wrappedBuffer("abcdef".getBytes(StandardCharsets.UTF_8)), 0, 6).closeEntry().build();

    // Append one value per column, in column order (col1 through col7).
    rowEncoder.appendColumnValue(longArrayBlock, 0);
    rowEncoder.appendColumnValue(varArrayBlock, 0);
    rowEncoder.appendColumnValue(longArrayBlock, 0);
    rowEncoder.appendColumnValue(varArrayBlock, 0);
    rowEncoder.appendColumnValue(longArrayBlock, 0);
    rowEncoder.appendColumnValue(varArrayBlock, 0);
    rowEncoder.appendColumnValue(varArrayBlock, 0);

    assertEquals(buf.array(), rowEncoder.toByteArray());
}
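A note on the mapping strings: for the raw format, a bare index such as "0" marks the start of a fixed-width field, while "start:end" (for example "8:14") marks an explicit byte range, so the seven columns above tile the 48-byte message exactly. As a quick illustration (not part of the Trino test), the expected layout can be read back with a plain ByteBuffer:

// Sketch only: read the expected 48-byte layout back, field by field.
ByteBuffer decoded = ByteBuffer.wrap(buf.array());
long col1Value = decoded.getLong();   // bytes 0-8   -> test1 (BIGINT)
byte[] col2Value = new byte[6];
decoded.get(col2Value);               // bytes 8-14  -> test2 (VARCHAR)
long col3Value = decoded.getLong();   // bytes 14-22 -> test3 (BIGINT)
// ...the remaining ranges (22:28, 28:36, 36:42, 42:48) follow the same pattern.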
Use of io.trino.plugin.kafka.KafkaColumnHandle in project trino by trinodb.
From the class TestAvroConfluentRowDecoder, method testSingleValueRow:
@Test
public void testSingleValueRow()
        throws Exception
{
    MockSchemaRegistryClient mockSchemaRegistryClient = new MockSchemaRegistryClient();
    Schema schema = Schema.create(Schema.Type.LONG);
    mockSchemaRegistryClient.register(format("%s-key", TOPIC), schema);
    Set<DecoderColumnHandle> columnHandles = ImmutableSet.<DecoderColumnHandle>builder()
            .add(new KafkaColumnHandle("col1", BIGINT, "col1", null, null, false, false, false))
            .build();
    RowDecoder rowDecoder = getRowDecoder(mockSchemaRegistryClient, columnHandles, schema);
    testSingleValueRow(rowDecoder, 3L, schema, 1);
}
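The testSingleValueRow call on the last line delegates to an overloaded helper (not shown in this snippet) that frames the value in the Confluent wire format: a zero magic byte, a 4-byte big-endian schema id, then the Avro-encoded payload. A minimal sketch of that framing, assuming Avro's GenericDatumWriter and a schemaId as returned by the registry's register call:

// Sketch only: frame an Avro LONG value in the Confluent wire format.
// schemaId is assumed to be the id returned by mockSchemaRegistryClient.register(...).
ByteArrayOutputStream out = new ByteArrayOutputStream();
out.write(0);                                               // magic byte
out.write(ByteBuffer.allocate(4).putInt(schemaId).array()); // schema id, big-endian
BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
new GenericDatumWriter<Long>(schema).write(3L, encoder);    // Avro payload
encoder.flush();
byte[] message = out.toByteArray();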
Use of io.trino.plugin.kafka.KafkaColumnHandle in project trino by trinodb.
From the class TestAvroConfluentRowDecoder, method testDecodingRows:
@Test
public void testDecodingRows()
        throws Exception
{
    MockSchemaRegistryClient mockSchemaRegistryClient = new MockSchemaRegistryClient();
    Schema initialSchema = SchemaBuilder.record(TOPIC)
            .fields()
            .name("col1").type().intType().noDefault()
            .name("col2").type().stringType().noDefault()
            .name("col3").type().intType().intDefault(42)
            .name("col4").type().nullable().intType().noDefault()
            .name("col5").type().nullable().bytesType().noDefault()
            .endRecord();
    Schema evolvedSchema = SchemaBuilder.record(TOPIC)
            .fields()
            .name("col1").type().intType().noDefault()
            .name("col2").type().stringType().noDefault()
            .name("col3").type().intType().intDefault(3)
            .name("col4").type().nullable().intType().noDefault()
            .name("col5").type().nullable().bytesType().noDefault()
            .name("col6").type().optional().longType()
            .endRecord();

    mockSchemaRegistryClient.register(TOPIC + "-value", initialSchema);
    mockSchemaRegistryClient.register(TOPIC + "-value", evolvedSchema);

    Set<DecoderColumnHandle> columnHandles = ImmutableSet.<DecoderColumnHandle>builder()
            .add(new KafkaColumnHandle("col1", INTEGER, "col1", null, null, false, false, false))
            .add(new KafkaColumnHandle("col2", VARCHAR, "col2", null, null, false, false, false))
            .add(new KafkaColumnHandle("col3", INTEGER, "col3", null, null, false, false, false))
            .add(new KafkaColumnHandle("col4", INTEGER, "col4", null, null, false, false, false))
            .add(new KafkaColumnHandle("col5", VARBINARY, "col5", null, null, false, false, false))
            .add(new KafkaColumnHandle("col6", BIGINT, "col6", null, null, false, false, false))
            .build();

    RowDecoder rowDecoder = getRowDecoder(mockSchemaRegistryClient, columnHandles, evolvedSchema);

    // Rows written with the initial schema (registered first, so schema id 1).
    testRow(rowDecoder, generateRecord(initialSchema, Arrays.asList(3, "string-3", 30, 300, ByteBuffer.wrap(new byte[] {1, 2, 3}))), 1);
    testRow(rowDecoder, generateRecord(initialSchema, Arrays.asList(3, "", 30, null, null)), 1);
    testRow(rowDecoder, generateRecord(initialSchema, Arrays.asList(3, "\u0394\u66f4\u6539", 30, null, ByteBuffer.wrap(new byte[] {1, 2, 3}))), 1);
    // Rows written with the evolved schema (schema id 2), which adds the optional col6.
    testRow(rowDecoder, generateRecord(evolvedSchema, Arrays.asList(4, "string-4", 40, 400, null, 4L)), 2);
    testRow(rowDecoder, generateRecord(evolvedSchema, Arrays.asList(5, "string-5", 50, 500, ByteBuffer.wrap(new byte[] {1, 2, 3}), null)), 2);
}
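Note that the first three rows carry no col6 at all; because the reader schema (evolvedSchema) declares col6 as optional, Avro schema resolution fills it with null. The generateRecord helper belongs to the test class and is not shown in this snippet; a plausible sketch (hypothetical, not the actual Trino helper) builds a GenericRecord by assigning the given values to the schema's fields positionally:

// Hypothetical sketch of the generateRecord helper used above.
private static GenericRecord generateRecord(Schema schema, List<Object> values)
{
    GenericRecord record = new GenericData.Record(schema);
    for (int i = 0; i < values.size(); i++) {
        // put by field position; values are listed in schema field order
        record.put(i, values.get(i));
    }
    return record;
}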