Use of io.confluent.ksql.util.KsqlConfig in the ksql project by confluentinc.
The class KafkaTopicsListTest, method shouldBuildValidTopicList.
/**
 * Verifies that {@code KafkaTopicsList.build()} produces one entry per described
 * topic, with consumer-group, consumer, and replica counts derived from the
 * mocked consumer-group client.
 */
@Test
public void shouldBuildValidTopicList() {
  final Collection<KsqlTopic> ksqlTopics = Collections.emptyList();

  // Full list of topics known to the cluster: a single one-partition topic.
  final Map<String, TopicDescription> topicDescriptions = new HashMap<>();
  final TopicPartitionInfo topicPartitionInfo = new TopicPartitionInfo(
      1, new Node(1, "", 8088), Collections.emptyList(), Collections.emptyList());
  topicDescriptions.put("test-topic",
      new TopicDescription("test-topic", false, Collections.singletonList(topicPartitionInfo)));

  // Mock consumer-group client: one group named "test-topic" containing one
  // consumer that is assigned one partition of the topic.
  final TopicPartition topicPartition = new TopicPartition("test-topic", 1);
  final KafkaConsumerGroupClientImpl.ConsumerSummary consumerSummary =
      new KafkaConsumerGroupClientImpl.ConsumerSummary("consumer-id");
  consumerSummary.addPartition(topicPartition);
  final KafkaConsumerGroupClientImpl.ConsumerGroupSummary consumerGroupSummary =
      new KafkaConsumerGroupClientImpl.ConsumerGroupSummary();
  consumerGroupSummary.addConsumerSummary(consumerSummary);
  final KafkaConsumerGroupClient consumerGroupClient = mock(KafkaConsumerGroupClient.class);
  expect(consumerGroupClient.listGroups()).andReturn(Collections.singletonList("test-topic"));
  expect(consumerGroupClient.describeConsumerGroup("test-topic")).andReturn(consumerGroupSummary);
  replay(consumerGroupClient);

  // Collections.emptyMap() is the type-safe replacement for the raw
  // Collections.EMPTY_MAP used previously (unchecked-conversion warning).
  final KafkaTopicsList topicsList = KafkaTopicsList.build(
      "statement test",
      ksqlTopics,
      topicDescriptions,
      new KsqlConfig(Collections.emptyMap()),
      consumerGroupClient);

  assertThat(topicsList.getTopics().size(), equalTo(1));
  final KafkaTopicInfo first = topicsList.getTopics().iterator().next();
  assertThat(first.getConsumerGroupCount(), equalTo(1));
  assertThat(first.getConsumerCount(), equalTo(1));
  assertThat(first.getReplicaInfo().size(), equalTo(1));
}
Use of io.confluent.ksql.util.KsqlConfig in the ksql project by confluentinc.
The class KsqlGenericRowAvroSerializerTest, method shouldSerializeRowCorrectly.
/**
 * Verifies the Avro round trip: serialize a fully-populated {@code GenericRow},
 * deserialize it with the schema-registry-aware deserializer, and check that
 * every column (primitive, array, and map) survives intact.
 */
@Test
public void shouldSerializeRowCorrectly() {
  final SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  final KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer =
      new KsqlGenericRowAvroSerializer(schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));
  // List<Object> replaces the raw List used previously (unchecked warning).
  final List<Object> columns = Arrays.asList(
      1511897796092L,
      1L,
      "item_1",
      10.0,
      new Double[] { 100.0 },
      Collections.singletonMap("key1", 100.0));
  final GenericRow genericRow = new GenericRow(columns);
  final byte[] serializedRow = ksqlGenericRowAvroSerializer.serialize("t1", genericRow);

  final KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
  final GenericRecord genericRecord =
      (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow);
  Assert.assertNotNull(genericRecord);

  // Field names are upper-cased in the Avro schema generated by ksql.
  assertThat("Incorrect serialization.", genericRecord.get("ordertime".toUpperCase()), equalTo(1511897796092L));
  assertThat("Incorrect serialization.", genericRecord.get("orderid".toUpperCase()), equalTo(1L));
  // Avro decodes strings as Utf8; compare via toString().
  assertThat("Incorrect serialization.", genericRecord.get("itemid".toUpperCase()).toString(), equalTo("item_1"));
  assertThat("Incorrect serialization.", genericRecord.get("orderunits".toUpperCase()), equalTo(10.0));

  // Wildcard-parameterized casts replace the raw types used previously.
  final GenericData.Array<?> array = (GenericData.Array<?>) genericRecord.get("arraycol".toUpperCase());
  final Map<?, ?> map = (Map<?, ?>) genericRecord.get("mapcol".toUpperCase());
  assertThat("Incorrect serialization.", array.size(), equalTo(1));
  assertThat("Incorrect serialization.", array.get(0), equalTo(100.0));
  assertThat("Incorrect serialization.", map.size(), equalTo(1));
  // Avro map keys decode as Utf8, so look up with a Utf8 key.
  assertThat("Incorrect serialization.", map.get(new Utf8("key1")), equalTo(100.0));
}
Use of io.confluent.ksql.util.KsqlConfig in the ksql project by confluentinc.
The class KsqlGenericRowAvroSerializerTest, method shouldSerializeRowWithNullCorrectly.
/**
 * Verifies the Avro round trip when a nullable column ({@code itemid}) is null:
 * the null must survive serialization/deserialization and all other columns
 * must be unaffected.
 */
@Test
public void shouldSerializeRowWithNullCorrectly() {
  final SchemaRegistryClient schemaRegistryClient = new MockSchemaRegistryClient();
  final KsqlGenericRowAvroSerializer ksqlGenericRowAvroSerializer =
      new KsqlGenericRowAvroSerializer(schema, schemaRegistryClient, new KsqlConfig(new HashMap<>()));
  // List<Object> replaces the raw List used previously (unchecked warning).
  // itemid is deliberately null to exercise the nullable-field path.
  final List<Object> columns = Arrays.asList(
      1511897796092L,
      1L,
      null,
      10.0,
      new Double[] { 100.0 },
      Collections.singletonMap("key1", 100.0));
  final GenericRow genericRow = new GenericRow(columns);
  final byte[] serializedRow = ksqlGenericRowAvroSerializer.serialize("t1", genericRow);

  final KafkaAvroDeserializer kafkaAvroDeserializer = new KafkaAvroDeserializer(schemaRegistryClient);
  final GenericRecord genericRecord =
      (GenericRecord) kafkaAvroDeserializer.deserialize("t1", serializedRow);
  Assert.assertNotNull(genericRecord);

  // Field names are upper-cased in the Avro schema generated by ksql.
  assertThat("Incorrect serialization.", genericRecord.get("ordertime".toUpperCase()), equalTo(1511897796092L));
  assertThat("Incorrect serialization.", genericRecord.get("orderid".toUpperCase()), equalTo(1L));
  // The null column must come back as null, not a default value.
  assertThat("Incorrect serialization.", genericRecord.get("itemid".toUpperCase()), equalTo(null));
  assertThat("Incorrect serialization.", genericRecord.get("orderunits".toUpperCase()), equalTo(10.0));

  // Wildcard-parameterized casts replace the raw types used previously.
  final GenericData.Array<?> array = (GenericData.Array<?>) genericRecord.get("arraycol".toUpperCase());
  final Map<?, ?> map = (Map<?, ?>) genericRecord.get("mapcol".toUpperCase());
  assertThat("Incorrect serialization.", array.size(), equalTo(1));
  assertThat("Incorrect serialization.", array.get(0), equalTo(100.0));
  // Avro map keys decode as Utf8, so the expected map is keyed by Utf8.
  assertThat("Incorrect serialization.", map, equalTo(Collections.singletonMap(new Utf8("key1"), 100.0)));
}
Use of io.confluent.ksql.util.KsqlConfig in the ksql project by confluentinc.
The class StandaloneExecutor, method create.
/**
 * Factory: builds a {@code StandaloneExecutor} wired to a fresh {@code KsqlEngine}
 * configured from the given properties.
 *
 * @param properties  server configuration; converted to a {@code KsqlConfig}
 * @param queriesFile path to the file of queries the executor will run
 * @return a new executor wrapping a newly created engine
 */
public static StandaloneExecutor create(final Properties properties, final String queriesFile) {
final KsqlConfig ksqlConfig = new KsqlConfig(properties);
Map<String, Object> streamsProperties = ksqlConfig.getKsqlStreamConfigProps();
// Default the Streams application id when the caller did not set one.
// NOTE(review): streamsProperties is never read after this point in the
// visible code; this fallback only takes effect if getKsqlStreamConfigProps()
// returns a live map backing ksqlConfig rather than a copy — verify.
if (!streamsProperties.containsKey(StreamsConfig.APPLICATION_ID_CONFIG)) {
streamsProperties.put(StreamsConfig.APPLICATION_ID_CONFIG, KsqlConfig.KSQL_SERVICE_ID_DEFAULT);
}
// The engine owns a topic client built on a new AdminClient derived from the
// same config.
final KsqlEngine ksqlEngine = new KsqlEngine(ksqlConfig, new KafkaTopicClientImpl(AdminClient.create(ksqlConfig.getKsqlAdminClientConfigProps())));
return new StandaloneExecutor(ksqlEngine, queriesFile);
}
Aggregations