
Example 86 with StringDeserializer

Use of org.apache.kafka.common.serialization.StringDeserializer in project ksql by confluentinc.

From class UdfIntTest, method testShouldCastSelectedColumns.

private void testShouldCastSelectedColumns(String resultStreamName, String inputStreamName, DataSource.DataSourceSerDe dataSourceSerde) throws Exception {
    final String selectColumns = " CAST (ORDERUNITS AS INTEGER), CAST( PRICEARRAY[1]>1000 AS STRING), CAST (SUBSTRING" + "(ITEMID, 5) AS DOUBLE), CAST(ORDERUNITS AS VARCHAR) ";
    final String queryString = String.format("CREATE STREAM %s AS SELECT %s FROM %s WHERE %s;", resultStreamName, selectColumns, inputStreamName, "ORDERUNITS > 20 AND ITEMID LIKE '%_8'");
    ksqlContext.sql(queryString);
    Schema resultSchema = ksqlContext.getMetaStore().getSource(resultStreamName).getSchema();
    Map<String, GenericRow> expectedResults = Collections.singletonMap("8", new GenericRow(Arrays.asList(null, null, 80, "true", 8.0, "80.0")));
    Map<String, GenericRow> results = testHarness.consumeData(resultStreamName, resultSchema, 4, new StringDeserializer(), IntegrationTestHarness.RESULTS_POLL_MAX_TIME_MS, dataSourceSerde);
    assertThat(results, equalTo(expectedResults));
}
Also used: GenericRow (io.confluent.ksql.GenericRow), StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer), Schema (org.apache.kafka.connect.data.Schema)
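
The harness hides the deserializer behind consumeData, but StringDeserializer itself does nothing more than decode bytes as UTF-8. A minimal standalone sketch; the class name, topic, and payload are made up for illustration:

import java.nio.charset.StandardCharsets;
import org.apache.kafka.common.serialization.StringDeserializer;

public class StringDeserializerSketch {
    public static void main(String[] args) {
        // StringDeserializer decodes the raw record bytes as UTF-8 by default
        // (the charset can be changed via the "deserializer.encoding" config).
        try (StringDeserializer keyDeserializer = new StringDeserializer()) {
            byte[] rawKey = "8".getBytes(StandardCharsets.UTF_8); // hypothetical key bytes
            String key = keyDeserializer.deserialize("orders", rawKey);
            System.out.println(key); // prints "8"
        }
    }
}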

Example 87 with StringDeserializer

Use of org.apache.kafka.common.serialization.StringDeserializer in project ksql by confluentinc.

From class UdfIntTest, method testApplyUdfsToColumnsDelimited.

@Test
public void testApplyUdfsToColumnsDelimited() throws Exception {
    final String testStreamName = "SelectUDFsStreamDelimited".toUpperCase();
    final String queryString = String.format("CREATE STREAM %s AS SELECT %s FROM %s WHERE %s;", testStreamName, "ID, DESCRIPTION", delimitedStreamName, "ID LIKE '%_1'");
    ksqlContext.sql(queryString);
    Map<String, GenericRow> expectedResults = Collections.singletonMap("ITEM_1", new GenericRow(Arrays.asList("ITEM_1", "home cinema")));
    Map<String, GenericRow> results = testHarness.consumeData(testStreamName, itemDataProvider.schema(), 1, new StringDeserializer(), IntegrationTestHarness.RESULTS_POLL_MAX_TIME_MS, DataSource.DataSourceSerDe.DELIMITED);
    assertThat(results, equalTo(expectedResults));
}
Also used: GenericRow (io.confluent.ksql.GenericRow), StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer), IntegrationTest (io.confluent.common.utils.IntegrationTest), Test (org.junit.Test)
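
For context, KSQL's DELIMITED format is plain CSV on the wire, which is why string (de)serializers suffice here. A hypothetical illustration of how one value line maps to the selected columns; the on-the-wire string is an assumption, not taken from the ksql source:

public class DelimitedValueSketch {
    public static void main(String[] args) {
        // Assumed on-the-wire form of one DELIMITED record value:
        String value = "ITEM_1,home cinema";
        String[] columns = value.split(",", -1);
        System.out.println(columns[0]); // ID          -> "ITEM_1"
        System.out.println(columns[1]); // DESCRIPTION -> "home cinema"
    }
}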

Example 88 with StringDeserializer

Use of org.apache.kafka.common.serialization.StringDeserializer in project ksql by confluentinc.

From class WindowingIntTest, method shouldAggregateTumblingWindow.

@Test
public void shouldAggregateTumblingWindow() throws Exception {
    testHarness.publishTestData(topicName, dataProvider, now);
    final String streamName = "TUMBLING_AGGTEST";
    final String queryString = String.format("CREATE TABLE %s AS SELECT %s FROM ORDERS WINDOW %s WHERE ITEMID = 'ITEM_1' GROUP BY ITEMID;", streamName, "ITEMID, COUNT(ITEMID), SUM(ORDERUNITS)", "TUMBLING ( SIZE 10 SECONDS)");
    ksqlContext.sql(queryString);
    Schema resultSchema = ksqlContext.getMetaStore().getSource(streamName).getSchema();
    final GenericRow expected = new GenericRow(Arrays.asList(null, null, "ITEM_1", 2 /* 2 x items */, 20.0));
    final Map<String, GenericRow> results = new HashMap<>();
    TestUtils.waitForCondition(() -> {
        final Map<Windowed<String>, GenericRow> windowedResults = testHarness.consumeData(streamName, resultSchema, 1, new TimeWindowedDeserializer<>(new StringDeserializer()), MAX_POLL_PER_ITERATION);
        updateResults(results, windowedResults);
        final GenericRow actual = results.get("ITEM_1");
        return expected.equals(actual);
    }, 60000, "didn't receive correct results within timeout");
    AdminClient adminClient = AdminClient.create(testHarness.ksqlConfig.getKsqlStreamConfigProps());
    KafkaTopicClient topicClient = new KafkaTopicClientImpl(adminClient);
    Set<String> topicBeforeCleanup = topicClient.listTopicNames();
    assertThat("Expected to have 5 topics instead have : " + topicBeforeCleanup.size(), topicBeforeCleanup.size(), equalTo(5));
    QueryMetadata queryMetadata = ksqlContext.getRunningQueries().iterator().next();
    queryMetadata.close();
    Set<String> topicsAfterCleanUp = topicClient.listTopicNames();
    assertThat("Expected to see 3 topics after clean up but seeing " + topicsAfterCleanUp.size(), topicsAfterCleanUp.size(), equalTo(3));
    assertThat(topicClient.getTopicCleanupPolicy(streamName), equalTo(KafkaTopicClient.TopicCleanupPolicy.DELETE));
}
Also used: QueryMetadata (io.confluent.ksql.util.QueryMetadata), HashMap (java.util.HashMap), StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer), Schema (org.apache.kafka.connect.data.Schema), GenericRow (io.confluent.ksql.GenericRow), Windowed (org.apache.kafka.streams.kstream.Windowed), KafkaTopicClient (io.confluent.ksql.util.KafkaTopicClient), KafkaTopicClientImpl (io.confluent.ksql.util.KafkaTopicClientImpl), AdminClient (org.apache.kafka.clients.admin.AdminClient), IntegrationTest (io.confluent.common.utils.IntegrationTest), Test (org.junit.Test)
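
The key of a windowed aggregate is not a plain string: it carries the window alongside the group-by value, which is why the StringDeserializer is wrapped in a TimeWindowedDeserializer rather than used directly. A minimal sketch of that wrapping, assuming the 10-second size from the TUMBLING clause above:

import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.streams.kstream.TimeWindowedDeserializer;
import org.apache.kafka.streams.kstream.Windowed;

public class WindowedKeySketch {
    public static void main(String[] args) {
        // Wrap the inner deserializer; the window size (in ms) lets the
        // deserializer reconstruct the window end from the encoded start.
        TimeWindowedDeserializer<String> deserializer =
                new TimeWindowedDeserializer<>(new StringDeserializer(), 10_000L);
        // deserializer.deserialize(topic, bytes) yields a Windowed<String> whose
        // key() is the group-by value (e.g. "ITEM_1") and whose window() is the
        // 10-second tumbling window the record fell into.
    }
}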

Example 89 with StringDeserializer

Use of org.apache.kafka.common.serialization.StringDeserializer in project wikidata-query-rdf by wikimedia.

From class KafkaPoller, method buildKafkaConsumer.

// Suppressing resource warnings so Java doesn't complain about KafkaConsumer not being closed
@SuppressWarnings("resource")
private static KafkaConsumer<String, ChangeEvent> buildKafkaConsumer(String servers, String consumerId, Map<String, Class<? extends ChangeEvent>> topicToClass, int batchSize) {
    // See http://kafka.apache.org/documentation.html#consumerconfigs
    Properties props = new Properties();
    props.put("bootstrap.servers", servers);
    props.put("group.id", consumerId);
    // max.poll.interval.ms is the maximum gap between polls before the broker
    // decides the client is dead; 600000 ms (10 minutes) leaves ample headroom.
    props.put("max.poll.interval.ms", "600000");
    // We will manually commit after the batch is processed
    props.put("enable.auto.commit", "false");
    // See https://cwiki.apache.org/confluence/display/KAFKA/KIP-41%3A+KafkaConsumer+Max+Records
    // Kafka fetches up to max.poll.records records from each partition (fewer if
    // fewer are available) and hands them to poll() in round-robin fashion. The
    // next fetch is not initiated until the prefetched data dips below that limit.
    // TODO: Should we set it to half the batch size, so each batch spans several topics?
    props.put("max.poll.records", System.getProperty(MAX_POLL_PROPERTY, String.valueOf(batchSize)));
    // Roughly one batch of messages, since messages in the event queue are about 1 KB each.
    props.put("max.partition.fetch.bytes", System.getProperty(MAX_FETCH_PROPERTY, String.valueOf(batchSize * 1024)));
    log.info("Creating consumer {}", consumerId);
    return new KafkaConsumer<>(props, new StringDeserializer(), new JsonDeserializer<>(topicToClass));
}
Also used: StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer), KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer), Properties (java.util.Properties)
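
Since enable.auto.commit is false above, the caller is responsible for committing offsets after each processed batch. A generic sketch of that poll-then-commit loop, with String values standing in for ChangeEvent and placeholder broker, group, and topic names:

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

public class ManualCommitPollSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092"); // placeholder
        props.put("group.id", "sketch-consumer");         // placeholder
        props.put("enable.auto.commit", "false");         // commit manually, as above
        try (KafkaConsumer<String, String> consumer =
                     new KafkaConsumer<>(props, new StringDeserializer(), new StringDeserializer())) {
            consumer.subscribe(Collections.singletonList("events")); // placeholder topic
            ConsumerRecords<String, String> batch = consumer.poll(Duration.ofSeconds(1));
            for (ConsumerRecord<String, String> record : batch) {
                // process the record here ...
            }
            consumer.commitSync(); // commit offsets only once the whole batch succeeded
        }
    }
}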

Example 90 with StringDeserializer

Use of org.apache.kafka.common.serialization.StringDeserializer in project cas by apereo.

From class CasServicesStreamingKafkaConfiguration, method registeredServiceKafkaListenerContainerFactory.

@Bean
@RefreshScope(proxyMode = ScopedProxyMode.DEFAULT)
@ConditionalOnMissingBean(name = "registeredServiceKafkaListenerContainerFactory")
public ConcurrentKafkaListenerContainerFactory<String, DistributedCacheObject> registeredServiceKafkaListenerContainerFactory(final CasConfigurationProperties casProperties) {
    val kafka = casProperties.getServiceRegistry().getStream().getKafka();
    val factory = new KafkaObjectFactory<String, DistributedCacheObject>(kafka.getBootstrapAddress());
    factory.setConsumerGroupId("registeredServices");
    val mapper = new RegisteredServiceJsonSerializer().getObjectMapper();
    return factory.getKafkaListenerContainerFactory(new StringDeserializer(), new JsonDeserializer<>(DistributedCacheObject.class, mapper));
}
Also used: lombok.val (lombok.val), KafkaObjectFactory (org.apereo.cas.kafka.KafkaObjectFactory), RegisteredServiceJsonSerializer (org.apereo.cas.services.util.RegisteredServiceJsonSerializer), DistributedCacheObject (org.apereo.cas.util.cache.DistributedCacheObject), StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer), RefreshScope (org.springframework.cloud.context.config.annotation.RefreshScope), ConditionalOnMissingBean (org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean), Bean (org.springframework.context.annotation.Bean)
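
A hypothetical consumer of this factory, to show where the deserializers come into play; the topic name and listener class are assumptions, not taken from the CAS source:

import org.apereo.cas.util.cache.DistributedCacheObject;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

@Component
public class RegisteredServiceChangeListener {
    // Each record's key passes through StringDeserializer and its value through
    // the Jackson-based JsonDeserializer configured in the factory above.
    @KafkaListener(topics = "registeredServices",
                   containerFactory = "registeredServiceKafkaListenerContainerFactory")
    public void onChange(final DistributedCacheObject object) {
        // react to the replicated registered-service change ...
    }
}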

Aggregations

StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer): 152
Test (org.junit.Test): 91
StringSerializer (org.apache.kafka.common.serialization.StringSerializer): 59
TopologyTestDriver (org.apache.kafka.streams.TopologyTestDriver): 46
StreamsBuilder (org.apache.kafka.streams.StreamsBuilder): 35
HashMap (java.util.HashMap): 33
Properties (java.util.Properties): 32
IntegerDeserializer (org.apache.kafka.common.serialization.IntegerDeserializer): 31
Windowed (org.apache.kafka.streams.kstream.Windowed): 31
List (java.util.List): 29
KeyValue (org.apache.kafka.streams.KeyValue): 29
IntegrationTest (org.apache.kafka.test.IntegrationTest): 27
ArrayList (java.util.ArrayList): 26
LongDeserializer (org.apache.kafka.common.serialization.LongDeserializer): 25
Map (java.util.Map): 20
KafkaConsumer (org.apache.kafka.clients.consumer.KafkaConsumer): 20
IntegerSerializer (org.apache.kafka.common.serialization.IntegerSerializer): 17
Serdes (org.apache.kafka.common.serialization.Serdes): 17
KeyValueTimestamp (org.apache.kafka.streams.KeyValueTimestamp): 17
KStream (org.apache.kafka.streams.kstream.KStream): 17