Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.
The class SchemaKTable, method into.
@SuppressWarnings("unchecked")
@Override
public SchemaKTable into(final String kafkaTopicName, final Serde<GenericRow> topicValueSerDe, final Set<Integer> rowkeyIndexes) {
  if (isWindowed) {
    final Serde<Windowed<String>> windowedSerde = WindowedSerdes.timeWindowedSerdeFrom(String.class);
    ktable.toStream().mapValues((ValueMapper<GenericRow, GenericRow>) row -> {
      if (row == null) {
        return null;
      }
      final List<Object> columns = new ArrayList<>();
      for (int i = 0; i < row.getColumns().size(); i++) {
        if (!rowkeyIndexes.contains(i)) {
          columns.add(row.getColumns().get(i));
        }
      }
      return new GenericRow(columns);
    }).to(kafkaTopicName, Produced.with(windowedSerde, topicValueSerDe));
  } else {
    ktable.toStream().mapValues((ValueMapper<GenericRow, GenericRow>) row -> {
      if (row == null) {
        return null;
      }
      final List<Object> columns = new ArrayList<>();
      for (int i = 0; i < row.getColumns().size(); i++) {
        if (!rowkeyIndexes.contains(i)) {
          columns.add(row.getColumns().get(i));
        }
      }
      return new GenericRow(columns);
    }).to(kafkaTopicName, Produced.with(Serdes.String(), topicValueSerDe));
  }
  return this;
}
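Both branches duplicate the column-stripping value mapper; only the key serde differs. A minimal refactoring sketch of the mapper the two branches could share, assuming a hypothetical helper name rowkeyStripper (not part of the KSQL codebase):

  // Hypothetical helper (not in the KSQL source): builds the value mapper
  // shared by the windowed and non-windowed branches. It drops the columns
  // whose indexes appear in rowkeyIndexes so the row key is not duplicated
  // in the topic value.
  private static ValueMapper<GenericRow, GenericRow> rowkeyStripper(final Set<Integer> rowkeyIndexes) {
    return row -> {
      if (row == null) {
        return null;
      }
      final List<Object> columns = new ArrayList<>();
      for (int i = 0; i < row.getColumns().size(); i++) {
        if (!rowkeyIndexes.contains(i)) {
          columns.add(row.getColumns().get(i));
        }
      }
      return new GenericRow(columns);
    };
  }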
Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.
The class StreamedQueryResourceTest, method testStreamQuery.
@Test
public void testStreamQuery() throws Throwable {
  final AtomicReference<Throwable> threadException = new AtomicReference<>(null);
  final Thread.UncaughtExceptionHandler threadExceptionHandler =
      (thread, exception) -> threadException.compareAndSet(null, exception);
  final String queryString = "SELECT * FROM test_stream;";
  final SynchronousQueue<KeyValue<String, GenericRow>> rowQueue = new SynchronousQueue<>();
  final LinkedList<GenericRow> writtenRows = new LinkedList<>();
  final Thread rowQueuePopulatorThread = new Thread(new Runnable() {
    @Override
    public void run() {
      try {
        for (int i = 0; ; i++) {
          String key = Integer.toString(i);
          GenericRow value = new GenericRow(Collections.singletonList(i));
          synchronized (writtenRows) {
            writtenRows.add(value);
          }
          rowQueue.put(new KeyValue<>(key, value));
        }
      } catch (InterruptedException exception) {
        // Expected during the test, so it's fine
      }
    }
  }, "Row Queue Populator");
  rowQueuePopulatorThread.setUncaughtExceptionHandler(threadExceptionHandler);
  rowQueuePopulatorThread.start();
  final KafkaStreams mockKafkaStreams = mock(KafkaStreams.class);
  mockKafkaStreams.start();
  expectLastCall();
  mockKafkaStreams.setUncaughtExceptionHandler(anyObject(Thread.UncaughtExceptionHandler.class));
  expectLastCall();
  expect(mockKafkaStreams.state()).andReturn(KafkaStreams.State.NOT_RUNNING);
  mockKafkaStreams.close();
  expectLastCall();
  mockKafkaStreams.cleanUp();
  expectLastCall();
  final OutputNode mockOutputNode = mock(OutputNode.class);
  expect(mockOutputNode.getSchema()).andReturn(SchemaBuilder.struct().field("f1", SchemaBuilder.INT32_SCHEMA));
  final Map<String, Object> requestStreamsProperties = Collections.emptyMap();
  KsqlEngine mockKsqlEngine = mock(KsqlEngine.class);
  KafkaTopicClient mockKafkaTopicClient = mock(KafkaTopicClientImpl.class);
  expect(mockKsqlEngine.getTopicClient()).andReturn(mockKafkaTopicClient);
  final QueuedQueryMetadata queuedQueryMetadata = new QueuedQueryMetadata(
      queryString, mockKafkaStreams, mockOutputNode, "", rowQueue,
      DataSource.DataSourceType.KSTREAM, "", mockKafkaTopicClient, null);
  expect(mockKsqlEngine.buildMultipleQueries(queryString, requestStreamsProperties))
      .andReturn(Collections.singletonList(queuedQueryMetadata));
  mockKsqlEngine.removeTemporaryQuery(queuedQueryMetadata);
  expectLastCall();
  StatementParser mockStatementParser = mock(StatementParser.class);
  expect(mockStatementParser.parseSingleStatement(queryString)).andReturn(mock(Query.class));
  replay(mockKsqlEngine, mockStatementParser, mockKafkaStreams, mockOutputNode);
  StreamedQueryResource testResource = new StreamedQueryResource(mockKsqlEngine, mockStatementParser, 1000);
  Response response = testResource.streamQuery(new KsqlRequest(queryString, requestStreamsProperties));
  PipedOutputStream responseOutputStream = new EOFPipedOutputStream();
  PipedInputStream responseInputStream = new PipedInputStream(responseOutputStream, 1);
  StreamingOutput responseStream = (StreamingOutput) response.getEntity();
  final Thread queryWriterThread = new Thread(new Runnable() {
    @Override
    public void run() {
      try {
        responseStream.write(responseOutputStream);
      } catch (EOFException exception) {
        // It's fine
      } catch (IOException exception) {
        throw new RuntimeException(exception);
      }
    }
  }, "Query Writer");
  queryWriterThread.setUncaughtExceptionHandler(threadExceptionHandler);
  queryWriterThread.start();
  Scanner responseScanner = new Scanner(responseInputStream);
  ObjectMapper objectMapper = new ObjectMapper();
  for (int i = 0; i < 5; i++) {
    if (!responseScanner.hasNextLine()) {
      throw new Exception("Response input stream failed to have expected line available");
    }
    String responseLine = responseScanner.nextLine();
    if (responseLine.trim().isEmpty()) {
      i--;
    } else {
      GenericRow expectedRow;
      synchronized (writtenRows) {
        expectedRow = writtenRows.poll();
      }
      GenericRow testRow = objectMapper.readValue(responseLine, StreamedRow.class).getRow();
      assertEquals(expectedRow, testRow);
    }
  }
  responseOutputStream.close();
  queryWriterThread.join();
  rowQueuePopulatorThread.interrupt();
  rowQueuePopulatorThread.join();
  // Make sure the Kafka Streams instance has been closed and cleaned up
  verify(mockKafkaStreams);
  // If one of the worker threads threw an exception without otherwise failing the test up to this
  // point, rethrow it now in the main thread so the test fails
  Throwable exception = threadException.get();
  if (exception != null) {
    throw exception;
  }
}
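The test consumes the JAX-RS StreamingOutput through a pipe: a writer thread pushes the response into a PipedOutputStream while the test thread scans the PipedInputStream line by line. A minimal standalone sketch of that pattern (the helper name openLineScanner is hypothetical, not part of the test class):

  // Hypothetical helper illustrating the pipe pattern used in the test:
  // write the StreamingOutput into one end of a pipe on a background
  // thread, and hand the caller a Scanner over the other end.
  static Scanner openLineScanner(final StreamingOutput body) throws IOException {
    final PipedOutputStream sink = new PipedOutputStream();
    final PipedInputStream source = new PipedInputStream(sink, 1);
    final Thread writer = new Thread(() -> {
      try {
        body.write(sink);  // blocks whenever the reader has not drained the pipe
        sink.close();
      } catch (IOException exception) {
        // A real test should surface this, e.g. via an UncaughtExceptionHandler.
        throw new RuntimeException(exception);
      }
    }, "streaming-output-writer");
    writer.setDaemon(true);
    writer.start();
    return new Scanner(source);
  }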
Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.
The class KsqlGenericRowAvroDeserializerTest, method shouldDeserializeWithMissingFields.
@Test
public void shouldDeserializeWithMissingFields() {
  String schemaStr1 = "{"
      + "\"namespace\": \"kql\","
      + " \"name\": \"orders\","
      + " \"type\": \"record\","
      + " \"fields\": ["
      + " {\"name\": \"orderTime\", \"type\": \"long\"},"
      + " {\"name\": \"orderId\", \"type\": \"long\"},"
      + " {\"name\": \"itemId\", \"type\": \"string\"},"
      + " {\"name\": \"orderUnits\", \"type\": \"double\"}"
      + " ]"
      + "}";
  Schema.Parser parser = new Schema.Parser();
  Schema avroSchema1 = parser.parse(schemaStr1);
  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  List<Object> columns = Arrays.asList(1511897796092L, 1L, "item_1", 10.0);
  GenericRow genericRow = new GenericRow(columns);
  byte[] serializedRow = getSerializedRow("t1", schemaRegistryClient, avroSchema1, genericRow);
  KsqlGenericRowAvroDeserializer ksqlGenericRowAvroDeserializer =
      new KsqlGenericRowAvroDeserializer(schema, schemaRegistryClient, false);
  GenericRow row = ksqlGenericRowAvroDeserializer.deserialize("t1", serializedRow);
  assertThat("Incorrect deserialization", row.getColumns().size(), equalTo(6));
  assertThat("Incorrect deserialization", (Long) row.getColumns().get(0), equalTo(1511897796092L));
  assertThat("Incorrect deserialization", (Long) row.getColumns().get(1), equalTo(1L));
  assertThat("Incorrect deserialization", (String) row.getColumns().get(2), equalTo("item_1"));
  Assert.assertNull(row.getColumns().get(4));
  Assert.assertNull(row.getColumns().get(5));
}
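The getSerializedRow helper is defined elsewhere in the test class and not shown here. A plausible sketch, assuming it copies the row's columns into an Avro GenericRecord by position and serializes with the schema-registry-aware KafkaAvroSerializer:

  // Plausible sketch of the helper (an assumption; the real implementation
  // lives elsewhere in the test class): map each GenericRow column onto the
  // corresponding Avro field, then serialize against the mock registry.
  private static byte[] getSerializedRow(final String topic,
      final SchemaRegistryClient schemaRegistryClient,
      final Schema rowAvroSchema,
      final GenericRow genericRow) {
    final GenericRecord avroRecord = new GenericData.Record(rowAvroSchema);
    final List<Schema.Field> fields = rowAvroSchema.getFields();
    for (int i = 0; i < genericRow.getColumns().size(); i++) {
      avroRecord.put(fields.get(i).name(), genericRow.getColumns().get(i));
    }
    final KafkaAvroSerializer serializer = new KafkaAvroSerializer(schemaRegistryClient);
    return serializer.serialize(topic, avroRecord);
  }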
Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.
The class KsqlGenericRowAvroSerializerTest, method shouldSerializeRowCorrectly.
@Test
public void shouldSerializeRowCorrectly() {
  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer =
      new KsqlGenericRowAvroSerializer(schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));
  List<Object> columns = Arrays.asList(1511897796092L, 1L, "item_1", 10.0,
      new Double[] { 100.0 }, Collections.singletonMap("key1", 100.0));
  GenericRow genericRow = new GenericRow(columns);
  byte[] serializedRow = ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
  KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
  GenericRecord genericRecord = (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow);
  Assert.assertNotNull(genericRecord);
  assertThat("Incorrect serialization.", genericRecord.get("ordertime".toUpperCase()), equalTo(1511897796092L));
  assertThat("Incorrect serialization.", genericRecord.get("orderid".toUpperCase()), equalTo(1L));
  assertThat("Incorrect serialization.", genericRecord.get("itemid".toUpperCase()).toString(), equalTo("item_1"));
  assertThat("Incorrect serialization.", genericRecord.get("orderunits".toUpperCase()), equalTo(10.0));
  GenericData.Array array = (GenericData.Array) genericRecord.get("arraycol".toUpperCase());
  Map map = (Map) genericRecord.get("mapcol".toUpperCase());
  assertThat("Incorrect serialization.", array.size(), equalTo(1));
  assertThat("Incorrect serialization.", array.get(0), equalTo(100.0));
  assertThat("Incorrect serialization.", map.size(), equalTo(1));
  assertThat("Incorrect serialization.", map.get(new Utf8("key1")), equalTo(100.0));
}
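The schema field passed to the serializer is defined elsewhere in the test class. A plausible reconstruction, inferred from the column values and the upper-cased field names asserted above, assuming optional Connect types so the null test below also passes:

  // Plausible reconstruction of the test's `schema` field (an assumption,
  // inferred from the asserted field names and value types above).
  // `Schema` and `SchemaBuilder` here are org.apache.kafka.connect.data types.
  private static final Schema schema = SchemaBuilder.struct()
      .field("ORDERTIME", Schema.OPTIONAL_INT64_SCHEMA)
      .field("ORDERID", Schema.OPTIONAL_INT64_SCHEMA)
      .field("ITEMID", Schema.OPTIONAL_STRING_SCHEMA)
      .field("ORDERUNITS", Schema.OPTIONAL_FLOAT64_SCHEMA)
      .field("ARRAYCOL", SchemaBuilder.array(Schema.OPTIONAL_FLOAT64_SCHEMA).optional().build())
      .field("MAPCOL", SchemaBuilder.map(Schema.OPTIONAL_STRING_SCHEMA, Schema.OPTIONAL_FLOAT64_SCHEMA).optional().build())
      .build();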
Use of io.confluent.ksql.GenericRow in project ksql by confluentinc.
The class KsqlGenericRowAvroSerializerTest, method shouldSerializeRowWithNullCorrectly.
@Test
public void shouldSerializeRowWithNullCorrectly() {
  SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer =
      new KsqlGenericRowAvroSerializer(schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));
  List<Object> columns = Arrays.asList(1511897796092L, 1L, null, 10.0,
      new Double[] { 100.0 }, Collections.singletonMap("key1", 100.0));
  GenericRow genericRow = new GenericRow(columns);
  byte[] serializedRow = ksqlGenericRowAvroSerializer.serialize("t1", genericRow);
  KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
  GenericRecord genericRecord = (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow);
  Assert.assertNotNull(genericRecord);
  assertThat("Incorrect serialization.", genericRecord.get("ordertime".toUpperCase()), equalTo(1511897796092L));
  assertThat("Incorrect serialization.", genericRecord.get("orderid".toUpperCase()), equalTo(1L));
  assertThat("Incorrect serialization.", genericRecord.get("itemid".toUpperCase()), equalTo(null));
  assertThat("Incorrect serialization.", genericRecord.get("orderunits".toUpperCase()), equalTo(10.0));
  GenericData.Array array = (GenericData.Array) genericRecord.get("arraycol".toUpperCase());
  Map map = (Map) genericRecord.get("mapcol".toUpperCase());
  assertThat("Incorrect serialization.", array.size(), equalTo(1));
  assertThat("Incorrect serialization.", array.get(0), equalTo(100.0));
  assertThat("Incorrect serialization.", map, equalTo(Collections.singletonMap(new Utf8("key1"), 100.0)));
}
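Combining this serializer with the KsqlGenericRowAvroDeserializer from the earlier test gives a quick end-to-end check of the null handling. A minimal round-trip sketch under the same `schema` and mock registry assumptions as the tests above:

  // Minimal round-trip sketch (an assumption, not part of the test suite):
  // a null column should survive serialize -> deserialize intact.
  KsqlGenericRowAvroSerializer serializer =
      new KsqlGenericRowAvroSerializer(schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));
  KsqlGenericRowAvroDeserializer deserializer =
      new KsqlGenericRowAvroDeserializer(schema, schemaRegistryClient, false);
  GenericRow original = new GenericRow(Arrays.asList(1511897796092L, 1L, null, 10.0,
      new Double[] { 100.0 }, Collections.singletonMap("key1", 100.0)));
  GenericRow roundTripped = deserializer.deserialize("t1", serializer.serialize("t1", original));
  Assert.assertNull(roundTripped.getColumns().get(2));  // the null ITEMID column survives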