Use of io.confluent.ksql.util.OrderDataProvider in project ksql by confluentinc.
The following snippet is from class JoinIntTest, method before.
@Before
public void before() throws Exception {
  testHarness = new IntegrationTestHarness();
  testHarness.start();

  // Start from the harness's stream config, then disable record caching so
  // join results are flushed promptly and the tests see consistent output.
  final Map<String, Object> streamsProps =
      new HashMap<>(testHarness.ksqlConfig.getKsqlStreamConfigProps());
  streamsProps.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);
  streamsProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");

  ksqlContext = KsqlContext.create(new KsqlConfig(streamsProps), testHarness.schemaRegistryClient);

  // Set up test data: item records are published 500ms earlier than orders.
  testHarness.createTopic(itemTableTopicJson);
  testHarness.createTopic(itemTableTopicAvro);
  itemDataProvider = new ItemDataProvider();
  testHarness.publishTestData(itemTableTopicJson, itemDataProvider, now - 500);
  testHarness.publishTestData(itemTableTopicAvro, itemDataProvider, now - 500, DataSource.DataSourceSerDe.AVRO);

  testHarness.createTopic(orderStreamTopicJson);
  testHarness.createTopic(orderStreamTopicAvro);
  orderDataProvider = new OrderDataProvider();
  testHarness.publishTestData(orderStreamTopicJson, orderDataProvider, now);
  testHarness.publishTestData(orderStreamTopicAvro, orderDataProvider, now, DataSource.DataSourceSerDe.AVRO);

  createStreams();
}
Use of io.confluent.ksql.util.OrderDataProvider in project ksql by confluentinc.
The following snippet is from class JsonFormatTest, method produceInitData.
private void produceInitData() throws Exception {
  // Seed the input topic with the standard order fixture records.
  final OrderDataProvider orders = new OrderDataProvider();
  topicProducer.produceInputData(inputTopic, orders.data(), orders.schema());

  // Publish one raw JSON log line, keyed "1", under a single-field schema.
  final Schema messageSchema =
      SchemaBuilder.struct().field("MESSAGE", SchemaBuilder.STRING_SCHEMA).build();
  final GenericRow messageRow = new GenericRow(Collections.singletonList("{\"log\":{\"@timestamp\":\"2017-05-30T16:44:22.175Z\",\"@version\":\"1\"," + "\"caasVersion\":\"0.0.2\",\"cloud\":\"aws\",\"logs\":[{\"entry\":\"first\"}],\"clusterId\":\"cp99\",\"clusterName\":\"kafka\",\"cpComponentId\":\"kafka\",\"host\":\"kafka-1-wwl0p\",\"k8sId\":\"k8s13\",\"k8sName\":\"perf\",\"level\":\"ERROR\",\"logger\":\"kafka.server.ReplicaFetcherThread\",\"message\":\"Found invalid messages during fetch for partition [foo512,172] offset 0 error Record is corrupt (stored crc = 1321230880, computed crc = 1139143803)\",\"networkId\":\"vpc-d8c7a9bf\",\"region\":\"us-west-2\",\"serverId\":\"1\",\"skuId\":\"sku5\",\"source\":\"kafka\",\"tenantId\":\"t47\",\"tenantName\":\"perf-test\",\"thread\":\"ReplicaFetcherThread-0-2\",\"zone\":\"us-west-2a\"},\"stream\":\"stdout\",\"time\":2017}"));
  final Map<String, GenericRow> messageRecords = new HashMap<>();
  messageRecords.put("1", messageRow);
  topicProducer.produceInputData(messageLogTopic, messageRecords, messageSchema);
}
Use of io.confluent.ksql.util.OrderDataProvider in project ksql by confluentinc.
The following snippet is from class StreamsSelectAndProjectIntTest, method before.
@Before
public void before() throws Exception {
  testHarness = new IntegrationTestHarness();
  testHarness.start();
  ksqlContext = KsqlContext.create(testHarness.ksqlConfig, testHarness.schemaRegistryClient);

  // Set up test data: the same order fixture is published to one topic per
  // serialization format, recording the metadata of each publish.
  dataProvider = new OrderDataProvider();
  testHarness.createTopic(jsonTopicName);
  testHarness.createTopic(avroTopicName);
  jsonRecordMetadataMap = testHarness.publishTestData(jsonTopicName, dataProvider, null, DataSource.DataSourceSerDe.JSON);
  avroRecordMetadataMap = testHarness.publishTestData(avroTopicName, dataProvider, null, DataSource.DataSourceSerDe.AVRO);

  createOrdersStream();
}
Use of io.confluent.ksql.util.OrderDataProvider in project ksql by confluentinc.
The following snippet is from class SecureIntegrationTest, method produceInitData.
private void produceInitData() throws Exception {
  // Seed the input topic only once; subsequent calls reuse the existing data.
  if (!topicClient.isTopicExists(INPUT_TOPIC)) {
    topicClient.createTopic(INPUT_TOPIC, 1, (short) 1);
    final OrderDataProvider provider = new OrderDataProvider();
    topicProducer.produceInputData(INPUT_TOPIC, provider.data(), provider.schema());
  }
}
Use of io.confluent.ksql.util.OrderDataProvider in project ksql by confluentinc.
The following snippet is from class UdfIntTest, method before.
@Before
public void before() throws Exception {
  testHarness = new IntegrationTestHarness();
  testHarness.start();
  ksqlContext = KsqlContext.create(testHarness.ksqlConfig, testHarness.schemaRegistryClient);

  testHarness.createTopic(jsonTopicName);
  testHarness.createTopic(avroTopicName);
  // Fix: the delimited topic was published to without being created first,
  // unlike its JSON/Avro siblings above. Create it explicitly so the test
  // does not depend on broker-side topic auto-creation.
  testHarness.createTopic(delimitedTopicName);

  // Set up test data: orders in JSON and Avro, items in delimited format.
  orderDataProvider = new OrderDataProvider();
  itemDataProvider = new ItemDataProvider();
  jsonRecordMetadataMap = testHarness.publishTestData(jsonTopicName, orderDataProvider, null, DataSource.DataSourceSerDe.JSON);
  avroRecordMetadataMap = testHarness.publishTestData(avroTopicName, orderDataProvider, null, DataSource.DataSourceSerDe.AVRO);
  testHarness.publishTestData(delimitedTopicName, itemDataProvider, null, DataSource.DataSourceSerDe.DELIMITED);

  createOrdersStream();
}
Aggregations