Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.
From class StructuredDataSourceNode, method buildStream.
@Override
public SchemaKStream buildStream(final StreamsBuilder builder, final KsqlConfig ksqlConfig,
    final KafkaTopicClient kafkaTopicClient, final FunctionRegistry functionRegistry,
    final Map<String, Object> props, final SchemaRegistryClient schemaRegistryClient) {
  if (getTimestampField() != null) {
    int timestampColumnIndex = getTimeStampColumnIndex();
    ksqlConfig.put(KsqlConfig.KSQL_TIMESTAMP_COLUMN_INDEX, timestampColumnIndex);
  }
  KsqlTopicSerDe ksqlTopicSerDe = getStructuredDataSource().getKsqlTopic().getKsqlTopicSerDe();
  Serde<GenericRow> genericRowSerde = ksqlTopicSerDe.getGenericRowSerde(
      SchemaUtil.removeImplicitRowTimeRowKeyFromSchema(getSchema()), ksqlConfig, false,
      schemaRegistryClient);
  if (getDataSourceType() == StructuredDataSource.DataSourceType.KTABLE) {
    final KsqlTable table = (KsqlTable) getStructuredDataSource();
    final KTable kTable = createKTable(builder, getAutoOffsetReset(props), table, genericRowSerde,
        table.getKsqlTopic().getKsqlTopicSerDe()
            .getGenericRowSerde(getSchema(), ksqlConfig, true, schemaRegistryClient));
    return new SchemaKTable(getSchema(), kTable, getKeyField(), new ArrayList<>(),
        table.isWindowed(), SchemaKStream.Type.SOURCE, functionRegistry, schemaRegistryClient);
  }
  return new SchemaKStream(
      getSchema(),
      builder.stream(getStructuredDataSource().getKsqlTopic().getKafkaTopicName(),
              Consumed.with(Serdes.String(), genericRowSerde))
          .mapValues(nonWindowedValueMapper)
          .transformValues(new AddTimestampColumn()),
      getKeyField(), new ArrayList<>(), SchemaKStream.Type.SOURCE, functionRegistry,
      schemaRegistryClient);
}
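For orientation, a minimal sketch of how a caller might invoke this method. The node and the surrounding runtime objects (ksqlConfig, kafkaTopicClient, functionRegistry, overriddenProperties, schemaRegistryClient) are assumed to exist already; the variable names here are hypothetical and not taken from the project.

// Hypothetical caller; every object other than the StreamsBuilder is assumed to be
// provided by the engine that owns this StructuredDataSourceNode.
StreamsBuilder builder = new StreamsBuilder();
SchemaKStream source = dataSourceNode.buildStream(
    builder,               // Kafka Streams topology builder
    ksqlConfig,            // engine configuration
    kafkaTopicClient,      // Kafka topic admin access
    functionRegistry,      // UDF/UDAF lookup
    overriddenProperties,  // per-query overrides, e.g. auto.offset.reset
    schemaRegistryClient); // used to build the Avro Serde<GenericRow>
// When the source is a KTABLE, the returned SchemaKStream is in fact a SchemaKTable.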
Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.
From class SchemaKGroupedStream, method aggregate.
@SuppressWarnings("unchecked")
public SchemaKTable aggregate(final Initializer initializer, final UdafAggregator aggregator,
    final WindowExpression windowExpression, final Serde<GenericRow> topicValueSerDe) {
  final KTable aggKtable;
  if (windowExpression != null) {
    final Materialized<String, GenericRow, ?> materialized =
        Materialized.<String, GenericRow, WindowStore<Bytes, byte[]>>with(Serdes.String(), topicValueSerDe);
    final KsqlWindowExpression ksqlWindowExpression = windowExpression.getKsqlWindowExpression();
    aggKtable = ksqlWindowExpression.applyAggregate(kgroupedStream, initializer, aggregator, materialized);
  } else {
    aggKtable = kgroupedStream.aggregate(initializer, aggregator,
        Materialized.with(Serdes.String(), topicValueSerDe));
  }
  return new SchemaKTable(schema, aggKtable, keyField, sourceSchemaKStreams, windowExpression != null,
      SchemaKStream.Type.AGGREGATE, functionRegistry, schemaRegistryClient);
}
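A minimal sketch of the non-windowed call path, assuming the engine has already built an initializer, a UdafAggregator, and a Serde<GenericRow>; the variable names are hypothetical.

// Passing null for the window expression takes the else-branch above.
SchemaKTable result = schemaKGroupedStream.aggregate(
    initializer,     // seeds each aggregate GenericRow
    udafAggregator,  // folds incoming rows into the aggregate
    null,            // no WINDOW clause
    rowSerde);       // Serde<GenericRow> backing the state store
// With a WindowExpression, the same call instead delegates to
// ksqlWindowExpression.applyAggregate(...) and the resulting table is marked as windowed.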
Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.
From class KsqlRestClientTest, method testStreamRowFromServer.
@Test
public void testStreamRowFromServer() throws InterruptedException {
  MockStreamedQueryResource sqr = mockApplication.getStreamedQueryResource();
  RestResponse<KsqlRestClient.QueryStream> queryResponse = ksqlRestClient.makeQueryRequest("Select *");
  Assert.assertNotNull(queryResponse);
  Assert.assertTrue(queryResponse.isSuccessful());
  // Get the stream writer from the mock server and load it up with a row
  List<MockStreamedQueryResource.TestStreamWriter> writers = sqr.getWriters();
  Assert.assertEquals(1, writers.size());
  MockStreamedQueryResource.TestStreamWriter writer = writers.get(0);
  try {
    writer.enq("hello");
    // Try to receive the row. Do this from another thread to avoid blocking indefinitely
    KsqlRestClient.QueryStream queryStream = queryResponse.getResponse();
    Thread t = new Thread(() -> queryStream.hasNext());
    t.setDaemon(true);
    t.start();
    t.join(10000);
    Assert.assertFalse(t.isAlive());
    Assert.assertTrue(queryStream.hasNext());
    StreamedRow sr = queryStream.next();
    Assert.assertNotNull(sr);
    GenericRow row = sr.getRow();
    Assert.assertEquals(1, row.getColumns().size());
    Assert.assertEquals("hello", row.getColumns().get(0));
  } finally {
    writer.finished();
  }
}
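For reference, the GenericRow API exercised by the final assertions is just a wrapper around a list of column values. A minimal standalone sketch, using only the constructor and getColumns() accessor that appear elsewhere on this page (java.util.Collections assumed to be imported):

// Build a one-column row and read it back, mirroring what the test expects from the server.
List columns = Collections.singletonList("hello");
GenericRow row = new GenericRow(columns);
Object first = row.getColumns().get(0);  // "hello"
int width = row.getColumns().size();     // 1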
Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.
From class KsqlGenericRowAvroDeserializerTest, method shouldCreateCorrectRow.
@Test
public void shouldCreateCorrectRow() {
  KafkaAvroDeserializer kafkaAvroDeserializer = EasyMock.mock(KafkaAvroDeserializer.class);
  EasyMock.expect(kafkaAvroDeserializer.deserialize(EasyMock.anyString(), EasyMock.anyObject()))
      .andReturn(genericRecord);
  expectLastCall();
  replay(kafkaAvroDeserializer);
  KsqlGenericRowAvroDeserializer ksqlGenericRowAvroDeserializer =
      new KsqlGenericRowAvroDeserializer(schema, kafkaAvroDeserializer, false);
  GenericRow genericRow = ksqlGenericRowAvroDeserializer.deserialize("", new byte[] {});
  assertThat("Column number does not match.", genericRow.getColumns().size(), equalTo(6));
  assertThat("Invalid column value.", genericRow.getColumns().get(0), equalTo(1511897796092L));
  assertThat("Invalid column value.", genericRow.getColumns().get(1), equalTo(1L));
  assertThat("Invalid column value.", ((Double[]) genericRow.getColumns().get(4))[0], equalTo(100.0));
  assertThat("Invalid column value.", ((Map<String, Double>) genericRow.getColumns().get(5)).get("key1"), equalTo(100.0));
}
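The assertions imply a six-column row. A sketch of a GenericRow matching that layout is below; the positions, Java types, and asserted values come from this page (the two unasserted values are borrowed from the next test), while the column meanings in the comments are assumptions.

// Hypothetical fixture matching the asserted layout, built the same way as in the next test.
List columns = Arrays.asList(
    1511897796092L,                           // 0: long, asserted above
    1L,                                       // 1: long, asserted above
    "item_1",                                 // 2: String, value borrowed from the next test
    10.0,                                     // 3: double, value borrowed from the next test
    new Double[] { 100.0 },                   // 4: array column, first element asserted
    Collections.singletonMap("key1", 100.0)); // 5: map column, "key1" asserted
GenericRow expected = new GenericRow(columns);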
Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.
From class KsqlGenericRowAvroDeserializerTest, method shouldDeserializeCorrectly.
@Test
@SuppressWarnings("unchecked")
public void shouldDeserializeCorrectly() {
  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  List columns = Arrays.asList(1511897796092L, 1L, "item_1", 10.0, new Double[] { 100.0 },
      Collections.singletonMap("key1", 100.0));
  GenericRow genericRow = new GenericRow(columns);
  KsqlGenericRowAvroDeserializer ksqlGenericRowAvroDeserializer =
      new KsqlGenericRowAvroDeserializer(schema, schemaRegistryClient, false);
  byte[] serializedRow = getSerializedRow("t1", schemaRegistryClient, avroSchema, genericRow);
  GenericRow row = ksqlGenericRowAvroDeserializer.deserialize("t1", serializedRow);
  Assert.assertNotNull(row);
  assertThat("Incorrect deserialization", row.getColumns().size(), equalTo(6));
  assertThat("Incorrect deserialization", row.getColumns().get(0), equalTo(1511897796092L));
  assertThat("Incorrect deserialization", row.getColumns().get(1), equalTo(1L));
  assertThat("Incorrect deserialization", row.getColumns().get(2), equalTo("item_1"));
  assertThat("Incorrect deserialization", row.getColumns().get(3), equalTo(10.0));
  assertThat("Incorrect deserialization", ((Double[]) row.getColumns().get(4)).length, equalTo(1));
  assertThat("Incorrect deserialization", ((Map) row.getColumns().get(5)).size(), equalTo(1));
}
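Because Double[] does not define value equality, the test above only checks the array length and map size. If deeper checks were wanted, Hamcrest's equalTo compares arrays by content, so a possible extension (not part of the original test) could look like this:

// Optional extension: compare the array and map columns by content rather than by size.
assertThat("Incorrect deserialization", (Double[]) row.getColumns().get(4),
    equalTo(new Double[] { 100.0 }));
assertThat("Incorrect deserialization",
    ((Map<String, Double>) row.getColumns().get(5)).get("key1"), equalTo(100.0));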