Search in sources:

Example 1 with KafkaTopicClientImpl

use of io.confluent.ksql.util.KafkaTopicClientImpl in project ksql by confluentinc.

From the class IntegrationTestHarness, the method start:

/**
 * Boots a single-node embedded Kafka cluster and wires up the KSQL config,
 * admin client, and topic client used by the integration test harness.
 */
public void start() throws Exception {
    // Bring up the embedded broker first; its bootstrap address feeds the config below.
    embeddedKafkaCluster = new EmbeddedSingleNodeKafkaCluster();
    embeddedKafkaCluster.start();

    final Map<String, Object> config = new HashMap<>();
    config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, embeddedKafkaCluster.bootstrapServers());
    config.put("application.id", "KSQL");
    // Commit/caching intervals are zeroed so results surface immediately in tests.
    config.put("commit.interval.ms", 0);
    config.put("cache.max.bytes.buffering", 0);
    config.put("auto.offset.reset", "earliest");
    // Isolate streams state in a throwaway temp directory per run.
    config.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath());

    this.ksqlConfig = new KsqlConfig(config);
    this.adminClient = AdminClient.create(ksqlConfig.getKsqlAdminClientConfigProps());
    this.topicClient = new KafkaTopicClientImpl(adminClient);
}
Also used : EmbeddedSingleNodeKafkaCluster(io.confluent.ksql.testutils.EmbeddedSingleNodeKafkaCluster) HashMap(java.util.HashMap) KsqlConfig(io.confluent.ksql.util.KsqlConfig) KafkaTopicClientImpl(io.confluent.ksql.util.KafkaTopicClientImpl)

Example 2 with KafkaTopicClientImpl

use of io.confluent.ksql.util.KafkaTopicClientImpl in project ksql by confluentinc.

From the class JsonFormatTest, the method before:

@Before
public void before() throws Exception {
    // Build the minimal streams/consumer config against the shared test cluster.
    final Map<String, Object> config = new HashMap<>();
    config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
    config.put("application.id", "KSQL");
    // Zeroed intervals make aggregation results visible without buffering delay.
    config.put("commit.interval.ms", 0);
    config.put("cache.max.bytes.buffering", 0);
    config.put("auto.offset.reset", "earliest");

    final KsqlConfig ksqlConfig = new KsqlConfig(config);
    adminClient = AdminClient.create(ksqlConfig.getKsqlAdminClientConfigProps());
    topicClient = new KafkaTopicClientImpl(adminClient);
    ksqlEngine = new KsqlEngine(ksqlConfig, topicClient);
    metaStore = ksqlEngine.getMetaStore();

    // Seed topics, data, and source streams before each test runs.
    createInitTopics();
    produceInitData();
    execInitCreateStreamQueries();
}
Also used : KsqlEngine(io.confluent.ksql.KsqlEngine) HashMap(java.util.HashMap) KsqlConfig(io.confluent.ksql.util.KsqlConfig) KafkaTopicClientImpl(io.confluent.ksql.util.KafkaTopicClientImpl) Before(org.junit.Before)

Example 3 with KafkaTopicClientImpl

use of io.confluent.ksql.util.KafkaTopicClientImpl in project ksql by confluentinc.

From the class SecureIntegrationTest, the method before:

@Before
public void before() throws Exception {
    // Start every test from a clean ACL slate on the secured cluster.
    SECURE_CLUSTER.clearAcls();
    outputTopic = "TEST_" + COUNTER.incrementAndGet();

    // Topic administration is performed as the super user.
    final KsqlConfig superUserConfig = new KsqlConfig(getKsqlConfig(SUPER_USER));
    final AdminClient admin = AdminClient.create(superUserConfig.getKsqlAdminClientConfigProps());
    topicClient = new KafkaTopicClientImpl(admin);

    produceInitData();
}
Also used : KsqlConfig(io.confluent.ksql.util.KsqlConfig) KafkaTopicClientImpl(io.confluent.ksql.util.KafkaTopicClientImpl) Before(org.junit.Before)

Example 4 with KafkaTopicClientImpl

use of io.confluent.ksql.util.KafkaTopicClientImpl in project ksql by confluentinc.

From the class WindowingIntTest, the method shouldAggregateWithNoWindow:

@Test
public void shouldAggregateWithNoWindow() throws Exception {
    testHarness.publishTestData(topicName, dataProvider, now);

    final String streamName = "NOWINDOW_AGGTEST";
    final String queryString = String.format(
        "CREATE TABLE %s AS SELECT %s FROM ORDERS WHERE ITEMID = 'ITEM_1' GROUP BY ITEMID;",
        streamName,
        "ITEMID, COUNT(ITEMID), SUM(ORDERUNITS)");
    ksqlContext.sql(queryString);

    final Schema resultSchema = ksqlContext.getMetaStore().getSource(streamName).getSchema();
    // Expected aggregate for ITEM_1: COUNT = 2 items, SUM(ORDERUNITS) = 20.0.
    final GenericRow expected = new GenericRow(Arrays.asList(null, null, "ITEM_1", 2, 20.0));

    // Poll until the aggregate row matches (or the 60s timeout expires).
    TestUtils.waitForCondition(() -> {
        final Map<String, GenericRow> aggregateResults = testHarness.consumeData(
            streamName, resultSchema, 1, new StringDeserializer(), MAX_POLL_PER_ITERATION);
        final GenericRow actual = aggregateResults.get("ITEM_1");
        return expected.equals(actual);
    }, 60000, "didn't receive correct results within timeout");

    // try-with-resources: the original leaked this AdminClient (never closed).
    try (AdminClient adminClient =
             AdminClient.create(testHarness.ksqlConfig.getKsqlStreamConfigProps())) {
        final KafkaTopicClient topicClient = new KafkaTopicClientImpl(adminClient);

        final Set<String> topicBeforeCleanup = topicClient.listTopicNames();
        assertThat("Expected to have 5 topics instead have : " + topicBeforeCleanup.size(),
            topicBeforeCleanup.size(), equalTo(5));

        // Closing the query should trigger cleanup of its internal topics.
        final QueryMetadata queryMetadata = ksqlContext.getRunningQueries().iterator().next();
        queryMetadata.close();

        final Set<String> topicsAfterCleanUp = topicClient.listTopicNames();
        assertThat("Expected to see 3 topics after clean up but seeing " + topicsAfterCleanUp.size(),
            topicsAfterCleanUp.size(), equalTo(3));
        // Table-backed sinks are expected to be compacted.
        assertThat(topicClient.getTopicCleanupPolicy(streamName),
            equalTo(KafkaTopicClient.TopicCleanupPolicy.COMPACT));
    }
}
Also used : GenericRow(io.confluent.ksql.GenericRow) QueryMetadata(io.confluent.ksql.util.QueryMetadata) KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) HashMap(java.util.HashMap) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Schema(org.apache.kafka.connect.data.Schema) KafkaTopicClientImpl(io.confluent.ksql.util.KafkaTopicClientImpl) AdminClient(org.apache.kafka.clients.admin.AdminClient) IntegrationTest(io.confluent.common.utils.IntegrationTest) Test(org.junit.Test)

Example 5 with KafkaTopicClientImpl

use of io.confluent.ksql.util.KafkaTopicClientImpl in project ksql by confluentinc.

From the class WindowingIntTest, the method shouldAggregateSessionWindow:

@Test
public void shouldAggregateSessionWindow() throws Exception {
    testHarness.publishTestData(topicName, dataProvider, now);

    final String streamName = "SESSION_AGGTEST";
    final String queryString = String.format(
        "CREATE TABLE %s AS SELECT %s FROM ORDERS WINDOW %s GROUP BY ORDERID;",
        streamName,
        "ORDERID, COUNT(*), SUM(ORDERUNITS)",
        "SESSION (10 SECONDS)");
    ksqlContext.sql(queryString);

    final Schema resultSchema = ksqlContext.getMetaStore().getSource(streamName).getSchema();
    // Expected aggregate for ORDER_6: COUNT = 6, SUM(ORDERUNITS) = 420.0.
    // (The original comment said "2 x items", which did not match these values.)
    final GenericRow expectedResults =
        new GenericRow(Arrays.asList(null, null, "ORDER_6", 6, 420.0));

    final Map<String, GenericRow> results = new HashMap<>();
    // Accumulate windowed rows until ORDER_6 matches and all 6 keys are seen.
    TestUtils.waitForCondition(() -> {
        final Map<Windowed<String>, GenericRow> windowedResults = testHarness.consumeData(
            streamName, resultSchema, datasetOneMetaData.size(),
            new TimeWindowedDeserializer<>(new StringDeserializer()), 1000);
        updateResults(results, windowedResults);
        final GenericRow actual = results.get("ORDER_6");
        return expectedResults.equals(actual) && results.size() == 6;
    }, 60000, "didn't receive correct results within timeout");

    // try-with-resources: the original leaked this AdminClient (never closed).
    try (AdminClient adminClient =
             AdminClient.create(testHarness.ksqlConfig.getKsqlStreamConfigProps())) {
        final KafkaTopicClient topicClient = new KafkaTopicClientImpl(adminClient);

        final Set<String> topicBeforeCleanup = topicClient.listTopicNames();
        assertThat("Expected to have 5 topics instead have : " + topicBeforeCleanup.size(),
            topicBeforeCleanup.size(), equalTo(5));

        // Closing the query should trigger cleanup of its internal topics.
        final QueryMetadata queryMetadata = ksqlContext.getRunningQueries().iterator().next();
        queryMetadata.close();

        final Set<String> topicsAfterCleanUp = topicClient.listTopicNames();
        assertThat("Expected to see 3 topics after clean up but seeing " + topicsAfterCleanUp.size(),
            topicsAfterCleanUp.size(), equalTo(3));
        // Windowed (session) sinks are expected to use the delete policy.
        assertThat(topicClient.getTopicCleanupPolicy(streamName),
            equalTo(KafkaTopicClient.TopicCleanupPolicy.DELETE));
    }
}
Also used : QueryMetadata(io.confluent.ksql.util.QueryMetadata) HashMap(java.util.HashMap) StringDeserializer(org.apache.kafka.common.serialization.StringDeserializer) Schema(org.apache.kafka.connect.data.Schema) GenericRow(io.confluent.ksql.GenericRow) Windowed(org.apache.kafka.streams.kstream.Windowed) KafkaTopicClient(io.confluent.ksql.util.KafkaTopicClient) KafkaTopicClientImpl(io.confluent.ksql.util.KafkaTopicClientImpl) AdminClient(org.apache.kafka.clients.admin.AdminClient) IntegrationTest(io.confluent.common.utils.IntegrationTest) Test(org.junit.Test)

Aggregations

KafkaTopicClientImpl (io.confluent.ksql.util.KafkaTopicClientImpl)12 KsqlConfig (io.confluent.ksql.util.KsqlConfig)8 KafkaTopicClient (io.confluent.ksql.util.KafkaTopicClient)7 HashMap (java.util.HashMap)7 AdminClient (org.apache.kafka.clients.admin.AdminClient)6 KsqlEngine (io.confluent.ksql.KsqlEngine)5 IntegrationTest (io.confluent.common.utils.IntegrationTest)4 GenericRow (io.confluent.ksql.GenericRow)4 QueryMetadata (io.confluent.ksql.util.QueryMetadata)4 StringDeserializer (org.apache.kafka.common.serialization.StringDeserializer)4 Schema (org.apache.kafka.connect.data.Schema)4 Test (org.junit.Test)4 Windowed (org.apache.kafka.streams.kstream.Windowed)3 Before (org.junit.Before)3 CreateStreamCommand (io.confluent.ksql.ddl.commands.CreateStreamCommand)1 RegisterTopicCommand (io.confluent.ksql.ddl.commands.RegisterTopicCommand)1 MetaStoreImpl (io.confluent.ksql.metastore.MetaStoreImpl)1 CreateStream (io.confluent.ksql.parser.tree.CreateStream)1 Expression (io.confluent.ksql.parser.tree.Expression)1 RegisterTopic (io.confluent.ksql.parser.tree.RegisterTopic)1