Use of io.trino.plugin.kafka.encoder.EncoderColumnHandle in project trino by trinodb.
From class KafkaPageSinkProvider, method createPageSink:
@Override
public ConnectorPageSink createPageSink(ConnectorTransactionHandle transactionHandle, ConnectorSession session, ConnectorInsertTableHandle tableHandle)
{
    requireNonNull(tableHandle, "tableHandle is null");
    KafkaTableHandle handle = (KafkaTableHandle) tableHandle;

    // Partition the table's columns into key columns and message columns.
    // Internal columns cannot be written to, so they are rejected outright.
    ImmutableList.Builder<EncoderColumnHandle> keyColumns = ImmutableList.builder();
    ImmutableList.Builder<EncoderColumnHandle> messageColumns = ImmutableList.builder();
    handle.getColumns().forEach(col -> {
        if (col.isInternal()) {
            throw new IllegalArgumentException(format("unexpected internal column '%s'", col.getName()));
        }
        if (col.isKeyCodec()) {
            keyColumns.add(col);
        }
        else {
            messageColumns.add(col);
        }
    });

    // Build one RowEncoder for the Kafka record key and one for the message body,
    // each with its own data format and optional schema.
    RowEncoder keyEncoder = encoderFactory.create(session, handle.getKeyDataFormat(), getDataSchema(handle.getKeyDataSchemaLocation()), keyColumns.build());
    RowEncoder messageEncoder = encoderFactory.create(session, handle.getMessageDataFormat(), getDataSchema(handle.getMessageDataSchemaLocation()), messageColumns.build());
    return new KafkaPageSink(handle.getTopicName(), handle.getColumns(), keyEncoder, messageEncoder, producerFactory, session);
}
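The helper getDataSchema is referenced above but not shown. A minimal sketch of what such a helper might do follows, assuming the schema location is a readable local file path; this is an illustration, not Trino's actual implementation, and the simplified error handling is an assumption.

// Hedged sketch: reads the optional schema file (e.g. an Avro schema) referenced
// by the table handle. Error handling is simplified for illustration.
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.Optional;

private static Optional<String> getDataSchema(Optional<String> dataSchemaLocation)
{
    return dataSchemaLocation.map(location -> {
        try {
            // Load the full schema file contents as a single string
            return Files.readString(Paths.get(location));
        }
        catch (IOException e) {
            throw new UncheckedIOException("failed to read data schema at " + location, e);
        }
    });
}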
Use of io.trino.plugin.kafka.encoder.EncoderColumnHandle in project trino by trinodb.
From class TestRawEncoderMapping, method testMapping:
@Test
public void testMapping() {
    // Each handle maps a column into the raw (fixed-layout) message: the mapping
    // string is either a start offset ("0") or an explicit "start:end" byte range ("8:14").
    EncoderColumnHandle col1 = new KafkaColumnHandle("test1", BIGINT, "0", "LONG", null, false, false, false);
    EncoderColumnHandle col2 = new KafkaColumnHandle("test2", createUnboundedVarcharType(), "8:14", "BYTE", null, false, false, false);
    EncoderColumnHandle col3 = new KafkaColumnHandle("test3", BIGINT, "14", "LONG", null, false, false, false);
    EncoderColumnHandle col4 = new KafkaColumnHandle("test4", createUnboundedVarcharType(), "22:28", "BYTE", null, false, false, false);
    EncoderColumnHandle col5 = new KafkaColumnHandle("test5", BIGINT, "28", "LONG", null, false, false, false);
    EncoderColumnHandle col6 = new KafkaColumnHandle("test6", createVarcharType(6), "36:42", "BYTE", null, false, false, false);
    EncoderColumnHandle col7 = new KafkaColumnHandle("test7", createVarcharType(6), "42:48", "BYTE", null, false, false, false);
    RowEncoder rowEncoder = ENCODER_FACTORY.create(TestingConnectorSession.SESSION, Optional.empty(), ImmutableList.of(col1, col2, col3, col4, col5, col6, col7));

    // Build the expected 48-byte buffer by hand, matching the mappings above.
    ByteBuffer buf = ByteBuffer.allocate(48);
    buf.putLong(123456789); // bytes 0-8 (test1)
    buf.put("abcdef".getBytes(StandardCharsets.UTF_8)); // bytes 8-14 (test2)
    buf.putLong(123456789); // bytes 14-22 (test3)
    buf.put("abcdef".getBytes(StandardCharsets.UTF_8)); // bytes 22-28 (test4)
    buf.putLong(123456789); // bytes 28-36 (test5)
    buf.put("abcdef".getBytes(StandardCharsets.UTF_8)); // bytes 36-42 (test6)
    buf.put("abcdef".getBytes(StandardCharsets.UTF_8)); // bytes 42-48 (test7)

    // Feed the same values through the encoder, one column at a time, and check
    // that the encoded output matches the hand-built buffer.
    Block longArrayBlock = new LongArrayBlockBuilder(null, 1).writeLong(123456789).closeEntry().build();
    Block varArrayBlock = new VariableWidthBlockBuilder(null, 1, 6).writeBytes(Slices.wrappedBuffer("abcdef".getBytes(StandardCharsets.UTF_8)), 0, 6).closeEntry().build();
    rowEncoder.appendColumnValue(longArrayBlock, 0);
    rowEncoder.appendColumnValue(varArrayBlock, 0);
    rowEncoder.appendColumnValue(longArrayBlock, 0);
    rowEncoder.appendColumnValue(varArrayBlock, 0);
    rowEncoder.appendColumnValue(longArrayBlock, 0);
    rowEncoder.appendColumnValue(varArrayBlock, 0);
    rowEncoder.appendColumnValue(varArrayBlock, 0);
    assertEquals(buf.array(), rowEncoder.toByteArray());
}
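For reference, the mapping strings above follow the raw format's offset syntax: a bare number is the start offset of a fixed-width field (8 bytes for LONG), while "start:end" gives an explicit byte range. A minimal sketch of how such a mapping string could be interpreted is below; the names ByteRange and parseMapping are hypothetical illustrations, not Trino API.

// Hedged sketch (not Trino's actual parser): interpret a raw mapping string
// such as "0" or "8:14" as a byte range within the message.
record ByteRange(int start, int end) {}

static ByteRange parseMapping(String mapping, int fixedWidth)
{
    int colon = mapping.indexOf(':');
    if (colon < 0) {
        // Bare start offset: the field occupies fixedWidth bytes from there,
        // e.g. "14" with width 8 covers bytes 14-22.
        int start = Integer.parseInt(mapping);
        return new ByteRange(start, start + fixedWidth);
    }
    // Explicit range, e.g. "8:14" covers bytes 8-14.
    return new ByteRange(
            Integer.parseInt(mapping.substring(0, colon)),
            Integer.parseInt(mapping.substring(colon + 1)));
}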