
Example 31 with KsqlTopic

Use of io.confluent.ksql.execution.ddl.commands.KsqlTopic in project ksql by confluentinc.

The class LogicalPlanner, method buildOutputNode.

private OutputNode buildOutputNode(final PlanNode sourcePlanNode) {
    final LogicalSchema inputSchema = sourcePlanNode.getSchema();
    final Optional<TimestampColumn> timestampColumn = getTimestampColumn(inputSchema, analysis);
    if (!analysis.getInto().isPresent()) {
        return new KsqlBareOutputNode(
            new PlanNodeId("KSQL_STDOUT_NAME"), sourcePlanNode, inputSchema,
            analysis.getLimitClause(), timestampColumn, getWindowInfo());
    }
    final Into into = analysis.getInto().get();
    final KsqlTopic existingTopic = getSinkTopic(into, sourcePlanNode.getSchema());
    return new KsqlStructuredDataOutputNode(
        new PlanNodeId(into.getName().text()), sourcePlanNode, inputSchema,
        timestampColumn, existingTopic, analysis.getLimitClause(),
        into.isCreate(), into.getName(), analysis.getOrReplace());
}
Also used : PlanNodeId(io.confluent.ksql.planner.plan.PlanNodeId) Into(io.confluent.ksql.analyzer.Analysis.Into) KsqlBareOutputNode(io.confluent.ksql.planner.plan.KsqlBareOutputNode) TimestampColumn(io.confluent.ksql.execution.timestamp.TimestampColumn) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) KsqlStructuredDataOutputNode(io.confluent.ksql.planner.plan.KsqlStructuredDataOutputNode) KsqlTopic(io.confluent.ksql.execution.ddl.commands.KsqlTopic)
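
For reference, a minimal sketch of the sink-versus-stdout naming decision that drives the branch above. The helper name outputNodeId is made up for illustration, and maybeInto stands in for analysis.getInto(); only types and calls already visible in the snippet are used.

import io.confluent.ksql.analyzer.Analysis.Into;
import io.confluent.ksql.planner.plan.PlanNodeId;
import java.util.Optional;

// Hypothetical helper: picks the plan node id the same way buildOutputNode does,
// falling back to the literal KSQL_STDOUT_NAME id when no INTO clause is present.
static PlanNodeId outputNodeId(final Optional<Into> maybeInto) {
    return maybeInto
        .map(into -> new PlanNodeId(into.getName().text()))
        .orElseGet(() -> new PlanNodeId("KSQL_STDOUT_NAME"));
}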

Example 32 with KsqlTopic

Use of io.confluent.ksql.execution.ddl.commands.KsqlTopic in project ksql by confluentinc.

The class LogicalPlanner, method getSinkTopic.

private KsqlTopic getSinkTopic(final Into into, final LogicalSchema schema) {
    if (into.getExistingTopic().isPresent()) {
        return into.getExistingTopic().get();
    }
    final NewTopic newTopic = into.getNewTopic().orElseThrow(IllegalStateException::new);
    final FormatInfo keyFormat = getSinkKeyFormat(schema, newTopic);
    final SerdeFeatures keyFeatures = SerdeFeaturesFactory.buildKeyFeatures(schema, FormatFactory.of(keyFormat));
    final SerdeFeatures valFeatures = SerdeFeaturesFactory.buildValueFeatures(
        schema, FormatFactory.of(newTopic.getValueFormat()),
        analysis.getProperties().getValueSerdeFeatures(), ksqlConfig);
    return new KsqlTopic(
        newTopic.getTopicName(),
        KeyFormat.of(keyFormat, keyFeatures, newTopic.getWindowInfo()),
        ValueFormat.of(newTopic.getValueFormat(), valFeatures));
}
Also used : NewTopic(io.confluent.ksql.analyzer.Analysis.Into.NewTopic) FormatInfo(io.confluent.ksql.serde.FormatInfo) SerdeFeatures(io.confluent.ksql.serde.SerdeFeatures) KsqlTopic(io.confluent.ksql.execution.ddl.commands.KsqlTopic)
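
A minimal sketch of constructing a KsqlTopic directly with the same three-argument constructor used above, assuming FormatInfo.of(String) and the no-argument SerdeFeatures.of() factories are available and that KeyFormat.of accepts Optional.empty() for the window info; the topic name "orders" and the KAFKA/JSON formats are illustrative only, not taken from the example.

import io.confluent.ksql.execution.ddl.commands.KsqlTopic;
import io.confluent.ksql.serde.FormatInfo;
import io.confluent.ksql.serde.KeyFormat;
import io.confluent.ksql.serde.SerdeFeatures;
import io.confluent.ksql.serde.ValueFormat;
import java.util.Optional;

// Illustrative sink topic: unwindowed KAFKA-format key, JSON value,
// no extra serde features enabled on either side.
final KsqlTopic sinkTopic = new KsqlTopic(
    "orders",
    KeyFormat.of(FormatInfo.of("KAFKA"), SerdeFeatures.of(), Optional.empty()),
    ValueFormat.of(FormatInfo.of("JSON"), SerdeFeatures.of()));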

Example 33 with KsqlTopic

Use of io.confluent.ksql.execution.ddl.commands.KsqlTopic in project ksql by confluentinc.

The class ScalablePushRegistryTest, method setUp.

@Before
public void setUp() {
    when(ksqlTopic.getKafkaTopicName()).thenReturn(TOPIC);
    when(kafkaConsumerFactory.create(any(), any(), any(), any(), any(), any())).thenReturn(kafkaConsumer);
    catchupCoordinator = new TestCatchupCoordinator();
    latestConsumer = new TestLatestConsumer(
        TOPIC, false, SCHEMA, kafkaConsumer, catchupCoordinator,
        assignment -> { }, ksqlConfig, Clock.systemUTC());
    latestConsumer2 = new TestLatestConsumer(
        TOPIC, false, SCHEMA, kafkaConsumer, catchupCoordinator,
        assignment -> { }, ksqlConfig, Clock.systemUTC());
    catchupConsumer = new TestCatchupConsumer(
        TOPIC, false, SCHEMA, kafkaConsumer, () -> latestConsumer,
        catchupCoordinator, pushOffsetRange, Clock.systemUTC(), pq -> { });
    when(latestConsumerFactory.create(
        any(), anyBoolean(), any(), any(), any(), any(), any(), any()))
        .thenReturn(latestConsumer, latestConsumer2);
    when(catchupConsumerFactory.create(
        any(), anyBoolean(), any(), any(), any(), any(), any(), any(), anyLong(), any()))
        .thenReturn(catchupConsumer);
    when(ksqlTopic.getKeyFormat()).thenReturn(keyFormat);
    when(keyFormat.isWindowed()).thenReturn(false);
    realExecutorService = Executors.newFixedThreadPool(2);
    doAnswer(a -> {
        final Runnable runnable = a.getArgument(0);
        startLatestRunnable.set(runnable);
        realExecutorService.submit(runnable);
        return null;
    }).when(executorService).submit(any(Runnable.class));
    doAnswer(a -> {
        final Runnable runnable = a.getArgument(0);
        realExecutorService.submit(runnable);
        return null;
    }).when(catchupService).submit(any(Runnable.class));
    when(processingQueue.getQueryId()).thenReturn(new QueryId("q1"));
    when(processingQueue2.getQueryId()).thenReturn(new QueryId("q2"));
    registry = new ScalablePushRegistry(
        locator, SCHEMA, false, ImmutableMap.of(), ksqlTopic, serviceContext,
        ksqlConfig, SOURCE_APP_ID, kafkaConsumerFactory, latestConsumerFactory,
        catchupConsumerFactory, executorService, catchupService);
    when(ksqlConfig.getInt(KsqlConfig.KSQL_QUERY_PUSH_V2_MAX_CATCHUP_CONSUMERS)).thenReturn(10);
}
Also used : CatchupCoordinator(io.confluent.ksql.physical.scalablepush.consumer.CatchupCoordinator) ColumnName(io.confluent.ksql.name.ColumnName) AssertEventually.assertThatEventually(io.confluent.ksql.test.util.AssertEventually.assertThatEventually) ServiceContext(io.confluent.ksql.services.ServiceContext) ArgumentMatchers.contains(org.mockito.ArgumentMatchers.contains) PushLocator(io.confluent.ksql.physical.scalablepush.locator.PushLocator) Mockito.doThrow(org.mockito.Mockito.doThrow) CatchupConsumer(io.confluent.ksql.physical.scalablepush.consumer.CatchupConsumer) Mockito.doAnswer(org.mockito.Mockito.doAnswer) After(org.junit.After) QueryId(io.confluent.ksql.query.QueryId) Mockito.doReturn(org.mockito.Mockito.doReturn) TopicPartition(org.apache.kafka.common.TopicPartition) ImmutableMap(com.google.common.collect.ImmutableMap) Collection(java.util.Collection) KsqlConfig(io.confluent.ksql.util.KsqlConfig) LogicalSchema(io.confluent.ksql.schema.ksql.LogicalSchema) Executors(java.util.concurrent.Executors) CatchupMetadata(io.confluent.ksql.physical.scalablepush.ScalablePushRegistry.CatchupMetadata) Optional(java.util.Optional) Matchers.is(org.hamcrest.Matchers.is) Matchers.containsString(org.hamcrest.Matchers.containsString) MockitoJUnitRunner(org.mockito.junit.MockitoJUnitRunner) SuppressFBWarnings(edu.umd.cs.findbugs.annotations.SuppressFBWarnings) KafkaConsumer(org.apache.kafka.clients.consumer.KafkaConsumer) ArgumentMatchers.any(org.mockito.ArgumentMatchers.any) StreamsConfig(org.apache.kafka.streams.StreamsConfig) ArgumentMatchers.anyLong(org.mockito.ArgumentMatchers.anyLong) KeyFormat(io.confluent.ksql.serde.KeyFormat) Mock(org.mockito.Mock) Assert.assertThrows(org.junit.Assert.assertThrows) RunWith(org.junit.runner.RunWith) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) ArgumentMatchers.anyBoolean(org.mockito.ArgumentMatchers.anyBoolean) AtomicReference(java.util.concurrent.atomic.AtomicReference) Function(java.util.function.Function) Supplier(java.util.function.Supplier) CatchupConsumerFactory(io.confluent.ksql.physical.scalablepush.consumer.CatchupConsumer.CatchupConsumerFactory) PushOffsetRange(io.confluent.ksql.util.PushOffsetRange) KafkaConsumerFactoryInterface(io.confluent.ksql.physical.scalablepush.consumer.KafkaConsumerFactory.KafkaConsumerFactoryInterface) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) ExecutorService(java.util.concurrent.ExecutorService) Before(org.junit.Before) LatestConsumer(io.confluent.ksql.physical.scalablepush.consumer.LatestConsumer) LatestConsumerFactory(io.confluent.ksql.physical.scalablepush.consumer.LatestConsumer.LatestConsumerFactory) Mockito.times(org.mockito.Mockito.times) Test(org.junit.Test) Mockito.when(org.mockito.Mockito.when) Mockito.verify(org.mockito.Mockito.verify) TimeUnit(java.util.concurrent.TimeUnit) Consumer(java.util.function.Consumer) Mockito.never(org.mockito.Mockito.never) GenericRow(io.confluent.ksql.GenericRow) KsqlTopic(io.confluent.ksql.execution.ddl.commands.KsqlTopic) Clock(java.time.Clock) SqlTypes(io.confluent.ksql.schema.ksql.types.SqlTypes) Collections(java.util.Collections) QueryId(io.confluent.ksql.query.QueryId) Before(org.junit.Before)
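
Outside the @Mock/MockitoJUnitRunner wiring used by the test above, the same KsqlTopic stubbing can be sketched with plain Mockito. Only the accessors the test itself stubs are shown; the topic name "orders" is illustrative and not taken from the test.

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import io.confluent.ksql.execution.ddl.commands.KsqlTopic;
import io.confluent.ksql.serde.KeyFormat;

// Stub only what the registry touches in this test:
// the backing Kafka topic name and an unwindowed key format.
final KsqlTopic ksqlTopic = mock(KsqlTopic.class);
final KeyFormat keyFormat = mock(KeyFormat.class);
when(ksqlTopic.getKafkaTopicName()).thenReturn("orders");
when(ksqlTopic.getKeyFormat()).thenReturn(keyFormat);
when(keyFormat.isWindowed()).thenReturn(false);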

Aggregations

KsqlTopic (io.confluent.ksql.execution.ddl.commands.KsqlTopic)33 DataSource (io.confluent.ksql.metastore.model.DataSource)10 LogicalSchema (io.confluent.ksql.schema.ksql.LogicalSchema)10 KsqlStream (io.confluent.ksql.metastore.model.KsqlStream)7 KeyFormat (io.confluent.ksql.serde.KeyFormat)6 Test (org.junit.Test)6 MetaStoreImpl (io.confluent.ksql.metastore.MetaStoreImpl)5 KsqlConfig (io.confluent.ksql.util.KsqlConfig)5 Before (org.junit.Before)5 KsqlTable (io.confluent.ksql.metastore.model.KsqlTable)4 KsqlStructuredDataOutputNode (io.confluent.ksql.planner.plan.KsqlStructuredDataOutputNode)4 Matchers.containsString (org.hamcrest.Matchers.containsString)4 InternalFunctionRegistry (io.confluent.ksql.function.InternalFunctionRegistry)3 ValueFormat (io.confluent.ksql.serde.ValueFormat)3 PersistentQueryMetadata (io.confluent.ksql.util.PersistentQueryMetadata)3 ImmutableMap (com.google.common.collect.ImmutableMap)2 SuppressFBWarnings (edu.umd.cs.findbugs.annotations.SuppressFBWarnings)2 CreateTableCommand (io.confluent.ksql.execution.ddl.commands.CreateTableCommand)2 RuntimeBuildContext (io.confluent.ksql.execution.runtime.RuntimeBuildContext)2 MutableMetaStore (io.confluent.ksql.metastore.MutableMetaStore)2